Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
sqlmapproject
GitHub Repository: sqlmapproject/sqlmap
Path: blob/master/lib/request/connect.py
2989 views
1
#!/usr/bin/env python
2
3
"""
4
Copyright (c) 2006-2025 sqlmap developers (https://sqlmap.org)
5
See the file 'LICENSE' for copying permission
6
"""
7
8
import binascii
9
import inspect
10
import logging
11
import os
12
import random
13
import re
14
import socket
15
import string
16
import struct
17
import sys
18
import time
19
import traceback
20
21
try:
22
import websocket
23
from websocket import WebSocketException
24
except ImportError:
25
class WebSocketException(Exception):
26
pass
27
28
from lib.core.agent import agent
29
from lib.core.common import asciifyUrl
30
from lib.core.common import calculateDeltaSeconds
31
from lib.core.common import checkFile
32
from lib.core.common import checkSameHost
33
from lib.core.common import chunkSplitPostData
34
from lib.core.common import clearConsoleLine
35
from lib.core.common import dataToStdout
36
from lib.core.common import escapeJsonValue
37
from lib.core.common import evaluateCode
38
from lib.core.common import extractRegexResult
39
from lib.core.common import filterNone
40
from lib.core.common import findMultipartPostBoundary
41
from lib.core.common import getCurrentThreadData
42
from lib.core.common import getHeader
43
from lib.core.common import getHostHeader
44
from lib.core.common import getRequestHeader
45
from lib.core.common import getSafeExString
46
from lib.core.common import logHTTPTraffic
47
from lib.core.common import openFile
48
from lib.core.common import popValue
49
from lib.core.common import parseJson
50
from lib.core.common import pushValue
51
from lib.core.common import randomizeParameterValue
52
from lib.core.common import randomInt
53
from lib.core.common import randomStr
54
from lib.core.common import readInput
55
from lib.core.common import removeReflectiveValues
56
from lib.core.common import safeVariableNaming
57
from lib.core.common import singleTimeLogMessage
58
from lib.core.common import singleTimeWarnMessage
59
from lib.core.common import stdev
60
from lib.core.common import unArrayizeValue
61
from lib.core.common import unsafeVariableNaming
62
from lib.core.common import urldecode
63
from lib.core.common import urlencode
64
from lib.core.common import wasLastResponseDelayed
65
from lib.core.compat import LooseVersion
66
from lib.core.compat import patchHeaders
67
from lib.core.compat import xrange
68
from lib.core.convert import encodeBase64
69
from lib.core.convert import getBytes
70
from lib.core.convert import getText
71
from lib.core.convert import getUnicode
72
from lib.core.data import cmdLineOptions
73
from lib.core.data import conf
74
from lib.core.data import kb
75
from lib.core.data import logger
76
from lib.core.datatype import AttribDict
77
from lib.core.decorators import stackedmethod
78
from lib.core.dicts import POST_HINT_CONTENT_TYPES
79
from lib.core.enums import ADJUST_TIME_DELAY
80
from lib.core.enums import AUTH_TYPE
81
from lib.core.enums import CUSTOM_LOGGING
82
from lib.core.enums import HINT
83
from lib.core.enums import HTTP_HEADER
84
from lib.core.enums import HTTPMETHOD
85
from lib.core.enums import NULLCONNECTION
86
from lib.core.enums import PAYLOAD
87
from lib.core.enums import PLACE
88
from lib.core.enums import POST_HINT
89
from lib.core.enums import REDIRECTION
90
from lib.core.enums import WEB_PLATFORM
91
from lib.core.exception import SqlmapCompressionException
92
from lib.core.exception import SqlmapConnectionException
93
from lib.core.exception import SqlmapGenericException
94
from lib.core.exception import SqlmapMissingDependence
95
from lib.core.exception import SqlmapSkipTargetException
96
from lib.core.exception import SqlmapSyntaxException
97
from lib.core.exception import SqlmapTokenException
98
from lib.core.exception import SqlmapValueException
99
from lib.core.settings import ASTERISK_MARKER
100
from lib.core.settings import BOUNDARY_BACKSLASH_MARKER
101
from lib.core.settings import DEFAULT_CONTENT_TYPE
102
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
103
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
104
from lib.core.settings import DEFAULT_USER_AGENT
105
from lib.core.settings import EVALCODE_ENCODED_PREFIX
106
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
107
from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
108
from lib.core.settings import IPS_WAF_CHECK_PAYLOAD
109
from lib.core.settings import IS_WIN
110
from lib.core.settings import JAVASCRIPT_HREF_REGEX
111
from lib.core.settings import LARGE_READ_TRIM_MARKER
112
from lib.core.settings import LIVE_COOKIES_TIMEOUT
113
from lib.core.settings import MIN_HTTPX_VERSION
114
from lib.core.settings import MAX_CONNECTION_READ_SIZE
115
from lib.core.settings import MAX_CONNECTIONS_REGEX
116
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
117
from lib.core.settings import MAX_CONSECUTIVE_CONNECTION_ERRORS
118
from lib.core.settings import MAX_MURPHY_SLEEP_TIME
119
from lib.core.settings import META_REFRESH_REGEX
120
from lib.core.settings import MAX_TIME_RESPONSES
121
from lib.core.settings import MIN_TIME_RESPONSES
122
from lib.core.settings import PAYLOAD_DELIMITER
123
from lib.core.settings import PERMISSION_DENIED_REGEX
124
from lib.core.settings import PLAIN_TEXT_CONTENT_TYPE
125
from lib.core.settings import RANDOM_INTEGER_MARKER
126
from lib.core.settings import RANDOM_STRING_MARKER
127
from lib.core.settings import REPLACEMENT_MARKER
128
from lib.core.settings import SAFE_HEX_MARKER
129
from lib.core.settings import TEXT_CONTENT_TYPE_REGEX
130
from lib.core.settings import UNENCODED_ORIGINAL_VALUE
131
from lib.core.settings import UNICODE_ENCODING
132
from lib.core.settings import URI_HTTP_HEADER
133
from lib.core.settings import WARN_TIME_STDEV
134
from lib.core.settings import WEBSOCKET_INITIAL_TIMEOUT
135
from lib.core.settings import YUGE_FACTOR
136
from lib.request.basic import decodePage
137
from lib.request.basic import forgeHeaders
138
from lib.request.basic import processResponse
139
from lib.request.comparison import comparison
140
from lib.request.direct import direct
141
from lib.request.methodrequest import MethodRequest
142
from lib.utils.safe2bin import safecharencode
143
from thirdparty import six
144
from thirdparty.odict import OrderedDict
145
from thirdparty.six import unichr as _unichr
146
from thirdparty.six.moves import http_client as _http_client
147
from thirdparty.six.moves import urllib as _urllib
148
from thirdparty.socks.socks import ProxyError
149
150
class Connect(object):
151
"""
152
This class defines methods used to perform HTTP requests
153
"""
154
155
@staticmethod
def _getPageProxy(**kwargs):
    """
    Thin guard wrapper around Connect.getPage().

    Bails out with a connection exception when the interpreter stack has
    grown suspiciously deep (runaway redirect/refresh recursion — see
    https://github.com/sqlmapproject/sqlmap/issues/4525) and converts a
    RuntimeError from the actual request into a (None, None, None) triple.
    """

    try:
        # inspect.stack() itself may fail in exotic environments;
        # such failures are deliberately ignored
        depth = len(inspect.stack())
        if depth > sys.getrecursionlimit() // 2:
            warnMsg = "unable to connect to the target URL"
            raise SqlmapConnectionException(warnMsg)
    except (TypeError, UnicodeError):
        pass

    try:
        return Connect.getPage(**kwargs)
    except RuntimeError:
        return None, None, None
168
169
@staticmethod
def _retryProxy(**kwargs):
    """
    Bumps the per-thread retry counter, optionally rotates to another
    proxy from the provided list, emits a one-time diagnostic hint about
    the most probable cause of the failure, and re-issues the request
    via Connect._getPageProxy() with 'retrying' flag set.
    """

    threadData = getCurrentThreadData()
    threadData.retriesCount += 1

    # Rotate proxy once the retry budget for the current one is exhausted
    if conf.proxyList and threadData.retriesCount >= conf.retries and not kb.locks.handlers.locked():
        logger.warning("changing proxy")

        conf.proxy = None
        threadData.retriesCount = 0

        setHTTPHandlers()

    if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
        # timed based payloads can cause web server unresponsiveness
        # if the injectable piece of code is some kind of JOIN-like query
        warnMsg = (
            "most likely web server instance hasn't recovered yet "
            "from previous timed based payload. If the problem "
            "persists please wait for a few minutes and rerun "
            "without flag 'T' in option '--technique' "
            "(e.g. '--flush-session --technique=BEUS') or try to "
            "lower the value of option '--time-sec' (e.g. '--time-sec=2')"
        )
        singleTimeWarnMessage(warnMsg)

    elif kb.originalPage is None:
        if conf.tor:
            warnMsg = (
                "please make sure that you have "
                "Tor installed and running so "
                "you could successfully use "
                "switch '--tor' "
            )
            # platform-specific pointer to Tor setup instructions
            if IS_WIN:
                warnMsg += "(e.g. 'https://www.torproject.org/download/')"
            else:
                warnMsg += "(e.g. 'https://help.ubuntu.com/community/Tor')"
        else:
            warnMsg = (
                "if the problem persists please check that the provided "
                "target URL is reachable"
            )

            # Suggest whichever mitigations the user has not tried yet
            suggestions = []
            if not conf.randomAgent:
                suggestions.append("switch '--random-agent'")
            if not any((conf.proxy, conf.proxyFile, conf.tor)):
                suggestions.append("proxy switches ('--proxy', '--proxy-file'...)")
            if suggestions:
                warnMsg += (
                    ". In case that it is, "
                    "you can try to rerun with "
                )
                warnMsg += " and/or ".join(suggestions)

        singleTimeWarnMessage(warnMsg)

    elif conf.threads > 1:
        warnMsg = (
            "if the problem persists please try to lower "
            "the number of used threads (option '--threads')"
        )
        singleTimeWarnMessage(warnMsg)

    kwargs['retrying'] = True
    return Connect._getPageProxy(**kwargs)
227
228
@staticmethod
def _connReadProxy(conn):
    """
    Reads the response body from the given connection object while
    enforcing size caps (MAX_CONNECTION_READ_SIZE per chunk,
    MAX_CONNECTION_TOTAL_SIZE overall). Compressed/non-text responses
    are read in one capped shot; overly large compressed payloads
    disable compression and raise SqlmapCompressionException.
    Returns the (possibly trimmed) raw bytes.
    """

    result = b""

    if not kb.dnsMode and conn:
        headers = conn.info()

        # One-shot read path for compressed or non-text payloads
        compressible = kb.pageCompress and headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate") or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower())

        if compressible:
            result = conn.read(MAX_CONNECTION_TOTAL_SIZE)
            if len(result) == MAX_CONNECTION_TOTAL_SIZE:
                singleTimeWarnMessage("large compressed response detected. Disabling compression")
                kb.pageCompress = False
                raise SqlmapCompressionException
        else:
            # Chunked read path; a short chunk signals end of body
            while conn:
                try:
                    chunk = conn.read(MAX_CONNECTION_READ_SIZE)
                except AssertionError:
                    chunk = b""

                if len(chunk) != MAX_CONNECTION_READ_SIZE:
                    result += chunk
                    break

                singleTimeWarnMessage("large response detected. This could take a while")

                # Trim payload-marker spans so huge responses stay manageable
                chunk = re.sub(getBytes(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start)), getBytes("%s%s%s" % (kb.chars.stop, LARGE_READ_TRIM_MARKER, kb.chars.start)), chunk)
                result += chunk

                if len(result) > MAX_CONNECTION_TOTAL_SIZE:
                    singleTimeWarnMessage("too large response detected. Automatically trimming it")
                    break

    if conf.yuge:
        # Artificially inflate the response for "yuge" testing mode
        result = YUGE_FACTOR * result

    return result
269
270
@staticmethod
271
def getPage(**kwargs):
272
"""
273
This method connects to the target URL or proxy and returns
274
the target URL page content
275
"""
276
277
if conf.offline:
278
return None, None, None
279
280
url = kwargs.get("url", None) or conf.url
281
get = kwargs.get("get", None)
282
post = kwargs.get("post", None)
283
method = kwargs.get("method", None)
284
cookie = kwargs.get("cookie", None)
285
ua = kwargs.get("ua", None) or conf.agent
286
referer = kwargs.get("referer", None) or conf.referer
287
direct_ = kwargs.get("direct", False)
288
multipart = kwargs.get("multipart", None)
289
silent = kwargs.get("silent", False)
290
raise404 = kwargs.get("raise404", True)
291
timeout = kwargs.get("timeout", None) or conf.timeout
292
auxHeaders = kwargs.get("auxHeaders", None)
293
response = kwargs.get("response", False)
294
ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout or conf.ignoreTimeouts
295
refreshing = kwargs.get("refreshing", False)
296
retrying = kwargs.get("retrying", False)
297
crawling = kwargs.get("crawling", False)
298
checking = kwargs.get("checking", False)
299
skipRead = kwargs.get("skipRead", False)
300
finalCode = kwargs.get("finalCode", False)
301
chunked = kwargs.get("chunked", False) or conf.chunked
302
303
if isinstance(conf.delay, (int, float)) and conf.delay > 0:
304
time.sleep(conf.delay)
305
306
start = time.time()
307
308
threadData = getCurrentThreadData()
309
with kb.locks.request:
310
kb.requestCounter += 1
311
threadData.lastRequestUID = kb.requestCounter
312
313
if conf.proxyFreq:
314
if kb.requestCounter % conf.proxyFreq == 0:
315
conf.proxy = None
316
317
warnMsg = "changing proxy"
318
logger.warning(warnMsg)
319
320
setHTTPHandlers()
321
322
if conf.dummy or conf.murphyRate and randomInt() % conf.murphyRate == 0:
323
if conf.murphyRate:
324
time.sleep(randomInt() % (MAX_MURPHY_SLEEP_TIME + 1))
325
326
page, headers, code = randomStr(int(randomInt()), alphabet=[_unichr(_) for _ in xrange(256)]), None, None if not conf.murphyRate else randomInt(3)
327
328
threadData.lastPage = page
329
threadData.lastCode = code
330
331
return page, headers, code
332
333
if conf.liveCookies:
334
with kb.locks.liveCookies:
335
if not checkFile(conf.liveCookies, raiseOnError=False) or os.path.getsize(conf.liveCookies) == 0:
336
warnMsg = "[%s] [WARNING] live cookies file '%s' is empty or non-existent. Waiting for timeout (%d seconds)" % (time.strftime("%X"), conf.liveCookies, LIVE_COOKIES_TIMEOUT)
337
dataToStdout(warnMsg)
338
339
valid = False
340
for _ in xrange(LIVE_COOKIES_TIMEOUT):
341
if checkFile(conf.liveCookies, raiseOnError=False) and os.path.getsize(conf.liveCookies) > 0:
342
valid = True
343
break
344
else:
345
dataToStdout('.')
346
time.sleep(1)
347
348
dataToStdout("\n")
349
350
if not valid:
351
errMsg = "problem occurred while loading cookies from file '%s'" % conf.liveCookies
352
raise SqlmapValueException(errMsg)
353
354
cookie = openFile(conf.liveCookies).read().strip()
355
cookie = re.sub(r"(?i)\ACookie:\s*", "", cookie)
356
357
if multipart:
358
post = multipart
359
else:
360
if not post:
361
chunked = False
362
363
elif chunked:
364
post = _urllib.parse.unquote(post)
365
post = chunkSplitPostData(post)
366
367
webSocket = url.lower().startswith("ws")
368
369
if not _urllib.parse.urlsplit(url).netloc:
370
url = _urllib.parse.urljoin(conf.url, url)
371
372
# flag to know if we are dealing with the same target host
373
target = checkSameHost(url, conf.url)
374
375
if not retrying:
376
# Reset the number of connection retries
377
threadData.retriesCount = 0
378
379
# fix for known issue when urllib2 just skips the other part of provided
380
# url splitted with space char while urlencoding it in the later phase
381
url = url.replace(" ", "%20")
382
383
if "://" not in url:
384
url = "http://%s" % url
385
386
conn = None
387
page = None
388
code = None
389
status = None
390
391
_ = _urllib.parse.urlsplit(url)
392
requestMsg = u"HTTP request [#%d]:\r\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
393
requestMsg += getUnicode(("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling, checking)) else url)
394
responseMsg = u"HTTP response "
395
requestHeaders = u""
396
responseHeaders = None
397
logHeaders = u""
398
skipLogTraffic = False
399
400
raise404 = raise404 and not kb.ignoreNotFound
401
402
# support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
403
# support those by default
404
url = asciifyUrl(url)
405
406
try:
407
socket.setdefaulttimeout(timeout)
408
409
if direct_:
410
if '?' in url:
411
url, params = url.split('?', 1)
412
params = urlencode(params)
413
url = "%s?%s" % (url, params)
414
415
elif any((refreshing, crawling, checking)):
416
pass
417
418
elif target:
419
if conf.forceSSL:
420
url = re.sub(r"(?i)\A(http|ws):", r"\g<1>s:", url)
421
url = re.sub(r"(?i):80/", ":443/", url)
422
423
if PLACE.GET in conf.parameters and not get:
424
get = conf.parameters[PLACE.GET]
425
426
if not conf.skipUrlEncode:
427
get = urlencode(get, limit=True)
428
429
if get:
430
if '?' in url:
431
url = "%s%s%s" % (url, DEFAULT_GET_POST_DELIMITER, get)
432
requestMsg += "%s%s" % (DEFAULT_GET_POST_DELIMITER, get)
433
else:
434
url = "%s?%s" % (url, get)
435
requestMsg += "?%s" % get
436
437
if PLACE.POST in conf.parameters and not post and method != HTTPMETHOD.GET:
438
post = conf.parameters[PLACE.POST]
439
440
elif get:
441
url = "%s?%s" % (url, get)
442
requestMsg += "?%s" % get
443
444
requestMsg += " %s" % _http_client.HTTPConnection._http_vsn_str
445
446
# Prepare HTTP headers
447
headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: getHeader(dict(conf.httpHeaders), HTTP_HEADER.HOST) or getHostHeader(url)}, base=None if target else {})
448
449
if HTTP_HEADER.COOKIE in headers:
450
cookie = headers[HTTP_HEADER.COOKIE]
451
452
if kb.authHeader:
453
headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader
454
455
if kb.proxyAuthHeader:
456
headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader
457
458
if not conf.requestFile or not target:
459
if not getHeader(headers, HTTP_HEADER.ACCEPT):
460
headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
461
462
if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING):
463
headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
464
465
elif conf.requestFile and getHeader(headers, HTTP_HEADER.USER_AGENT) == DEFAULT_USER_AGENT:
466
for header in headers:
467
if header.upper() == HTTP_HEADER.USER_AGENT.upper():
468
del headers[header]
469
break
470
471
if post is not None and not multipart and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
472
headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE if unArrayizeValue(conf.base64Parameter) != HTTPMETHOD.POST else PLAIN_TEXT_CONTENT_TYPE)
473
474
if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
475
warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTP_HEADER.CONTENT_TYPE
476
warnMsg += "Will try to reconstruct"
477
singleTimeWarnMessage(warnMsg)
478
479
boundary = findMultipartPostBoundary(conf.data)
480
if boundary:
481
headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)
482
483
if conf.keepAlive:
484
headers[HTTP_HEADER.CONNECTION] = "keep-alive"
485
486
if chunked:
487
headers[HTTP_HEADER.TRANSFER_ENCODING] = "chunked"
488
489
if auxHeaders:
490
headers = forgeHeaders(auxHeaders, headers)
491
492
if kb.headersFile:
493
content = openFile(kb.headersFile, "rb").read()
494
for line in content.split("\n"):
495
line = getText(line.strip())
496
if ':' in line:
497
header, value = line.split(':', 1)
498
headers[header] = value
499
500
if conf.localhost:
501
headers[HTTP_HEADER.HOST] = "localhost"
502
503
for key, value in list(headers.items()):
504
if key.upper() == HTTP_HEADER.ACCEPT_ENCODING.upper():
505
value = re.sub(r"(?i)(,)br(,)?", lambda match: ',' if match.group(1) and match.group(2) else "", value) or "identity"
506
507
del headers[key]
508
if isinstance(value, six.string_types):
509
for char in (r"\r", r"\n"):
510
value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", value)
511
headers[getBytes(key) if six.PY2 else key] = getBytes(value.strip("\r\n")) # Note: Python3 has_header() expects non-bytes value
512
513
if six.PY2:
514
url = getBytes(url) # Note: Python3 requires text while Python2 has problems when mixing text with binary POST
515
516
if webSocket:
517
ws = websocket.WebSocket()
518
ws.settimeout(WEBSOCKET_INITIAL_TIMEOUT if kb.webSocketRecvCount is None else timeout)
519
ws.connect(url, header=("%s: %s" % _ for _ in headers.items() if _[0] not in ("Host",)), cookie=cookie) # WebSocket will add Host field of headers automatically
520
ws.send(urldecode(post or ""))
521
522
_page = []
523
524
if kb.webSocketRecvCount is None:
525
while True:
526
try:
527
_page.append(ws.recv())
528
except websocket.WebSocketTimeoutException:
529
kb.webSocketRecvCount = len(_page)
530
break
531
else:
532
for i in xrange(max(1, kb.webSocketRecvCount)):
533
_page.append(ws.recv())
534
535
page = "\n".join(_page)
536
537
ws.close()
538
code = ws.status
539
status = _http_client.responses[code]
540
541
class _(dict):
542
pass
543
544
responseHeaders = _(ws.getheaders())
545
responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]
546
547
requestHeaders += "\r\n".join(["%s: %s" % (u"-".join(_.capitalize() for _ in getUnicode(key).split(u'-')) if hasattr(key, "capitalize") else getUnicode(key), getUnicode(value)) for (key, value) in responseHeaders.items()])
548
requestMsg += "\r\n%s" % requestHeaders
549
550
if post is not None:
551
requestMsg += "\r\n\r\n%s" % getUnicode(post)
552
553
requestMsg += "\r\n"
554
555
threadData.lastRequestMsg = requestMsg
556
557
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
558
else:
559
post = getBytes(post)
560
561
if unArrayizeValue(conf.base64Parameter) == HTTPMETHOD.POST:
562
if kb.place != HTTPMETHOD.POST:
563
conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
564
else:
565
post = urldecode(post, convall=True)
566
post = encodeBase64(post)
567
568
if target and cmdLineOptions.method or method and method not in (HTTPMETHOD.GET, HTTPMETHOD.POST):
569
req = MethodRequest(url, post, headers)
570
req.set_method(cmdLineOptions.method or method)
571
elif url is not None:
572
req = _urllib.request.Request(url, post, headers)
573
else:
574
return None, None, None
575
576
for function in kb.preprocessFunctions:
577
try:
578
function(req)
579
except Exception as ex:
580
errMsg = "error occurred while running preprocess "
581
errMsg += "function '%s' ('%s')" % (function.__name__, getSafeExString(ex))
582
raise SqlmapGenericException(errMsg)
583
else:
584
post, headers = req.data, req.headers
585
586
requestHeaders += "\r\n".join(["%s: %s" % (u"-".join(_.capitalize() for _ in getUnicode(key).split(u'-')) if hasattr(key, "capitalize") else getUnicode(key), getUnicode(value)) for (key, value) in req.header_items()])
587
588
if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
589
conf.cj._policy._now = conf.cj._now = int(time.time())
590
with conf.cj._cookies_lock:
591
cookies = conf.cj._cookies_for_request(req)
592
requestHeaders += "\r\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))
593
594
if post is not None:
595
if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH) and not chunked:
596
requestHeaders += "\r\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))
597
598
if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
599
requestHeaders += "\r\n%s: %s" % (HTTP_HEADER.CONNECTION, "close" if not conf.keepAlive else "keep-alive")
600
601
requestMsg += "\r\n%s" % requestHeaders
602
603
if post is not None:
604
requestMsg += "\r\n\r\n%s" % getUnicode(post)
605
606
if not chunked:
607
requestMsg += "\r\n"
608
609
if conf.cj:
610
for cookie in conf.cj:
611
if cookie.value is None:
612
cookie.value = ""
613
else:
614
for char in (r"\r", r"\n"):
615
cookie.value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", cookie.value)
616
617
if conf.http2:
618
try:
619
import httpx
620
except ImportError:
621
raise SqlmapMissingDependence("httpx[http2] not available (e.g. 'pip%s install httpx[http2]')" % ('3' if six.PY3 else ""))
622
623
if LooseVersion(httpx.__version__) < LooseVersion(MIN_HTTPX_VERSION):
624
raise SqlmapMissingDependence("outdated version of httpx detected (%s<%s)" % (httpx.__version__, MIN_HTTPX_VERSION))
625
626
try:
627
proxy_mounts = dict(("%s://" % key, httpx.HTTPTransport(proxy="%s%s" % ("http://" if "://" not in kb.proxies[key] else "", kb.proxies[key]))) for key in kb.proxies) if kb.proxies else None
628
with httpx.Client(verify=False, http2=True, timeout=timeout, follow_redirects=True, cookies=conf.cj, mounts=proxy_mounts) as client:
629
conn = client.request(method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET), url, headers=headers, data=post)
630
except (httpx.HTTPError, httpx.InvalidURL, httpx.CookieConflict, httpx.StreamError) as ex:
631
raise _http_client.HTTPException(getSafeExString(ex))
632
else:
633
conn.code = conn.status_code
634
conn.msg = conn.reason_phrase
635
conn.info = lambda c=conn: c.headers
636
637
conn._read_buffer = conn.read()
638
conn._read_offset = 0
639
640
requestMsg = re.sub(" HTTP/[0-9.]+\r\n", " %s\r\n" % conn.http_version, requestMsg, count=1)
641
642
if not multipart:
643
threadData.lastRequestMsg = requestMsg
644
645
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
646
647
def _read(count=None):
648
offset = conn._read_offset
649
if count is None:
650
result = conn._read_buffer[offset:]
651
conn._read_offset = len(conn._read_buffer)
652
else:
653
result = conn._read_buffer[offset: offset + count]
654
conn._read_offset += len(result)
655
return result
656
657
conn.read = _read
658
else:
659
if not multipart:
660
threadData.lastRequestMsg = requestMsg
661
662
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
663
664
conn = _urllib.request.urlopen(req)
665
666
if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and (conf.authType or "").lower() == AUTH_TYPE.BASIC.lower():
667
kb.authHeader = getUnicode(getRequestHeader(req, HTTP_HEADER.AUTHORIZATION))
668
669
if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
670
kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)
671
672
# Return response object
673
if response:
674
return conn, None, None
675
676
# Get HTTP response
677
if hasattr(conn, "redurl"):
678
page = (threadData.lastRedirectMsg[1] if kb.choices.redirect == REDIRECTION.NO else Connect._connReadProxy(conn)) if not skipRead else None
679
skipLogTraffic = kb.choices.redirect == REDIRECTION.NO
680
code = conn.redcode if not finalCode else code
681
else:
682
page = Connect._connReadProxy(conn) if not skipRead else None
683
684
if conn:
685
code = (code or conn.code) if conn.code == kb.originalCode else conn.code # do not override redirection code (for comparison purposes)
686
responseHeaders = conn.info()
687
responseHeaders[URI_HTTP_HEADER] = conn.geturl() if hasattr(conn, "geturl") else url
688
689
if getattr(conn, "redurl", None) is not None:
690
responseHeaders[HTTP_HEADER.LOCATION] = conn.redurl
691
692
responseHeaders = patchHeaders(responseHeaders)
693
kb.serverHeader = responseHeaders.get(HTTP_HEADER.SERVER, kb.serverHeader)
694
else:
695
code = None
696
responseHeaders = {}
697
698
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE), percentDecode=not crawling)
699
status = getUnicode(conn.msg) if conn and getattr(conn, "msg", None) else None
700
701
kb.connErrorCounter = 0
702
703
if not refreshing:
704
refresh = responseHeaders.get(HTTP_HEADER.REFRESH, "").split("url=")[-1].strip()
705
706
if extractRegexResult(META_REFRESH_REGEX, page):
707
refresh = extractRegexResult(META_REFRESH_REGEX, page)
708
709
debugMsg = "got HTML meta refresh header"
710
logger.debug(debugMsg)
711
712
if not refresh:
713
refresh = extractRegexResult(JAVASCRIPT_HREF_REGEX, page)
714
715
if refresh:
716
debugMsg = "got Javascript redirect logic"
717
logger.debug(debugMsg)
718
719
if refresh:
720
if kb.alwaysRefresh is None:
721
msg = "got a refresh intent "
722
msg += "(redirect like response common to login pages) to '%s'. " % refresh
723
msg += "Do you want to apply it from now on? [Y/n]"
724
725
kb.alwaysRefresh = readInput(msg, default='Y', boolean=True)
726
727
if kb.alwaysRefresh:
728
if re.search(r"\Ahttps?://", refresh, re.I):
729
url = refresh
730
else:
731
url = _urllib.parse.urljoin(url, refresh)
732
733
threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
734
kwargs["refreshing"] = True
735
kwargs["url"] = url
736
kwargs["get"] = None
737
kwargs["post"] = None
738
739
try:
740
return Connect._getPageProxy(**kwargs)
741
except SqlmapSyntaxException:
742
pass
743
744
# Explicit closing of connection object
745
if conn and not conf.keepAlive:
746
try:
747
if hasattr(conn, "fp") and hasattr(conn.fp, '_sock'):
748
conn.fp._sock.close()
749
conn.close()
750
except Exception as ex:
751
warnMsg = "problem occurred during connection closing ('%s')" % getSafeExString(ex)
752
logger.warning(warnMsg)
753
754
except SqlmapConnectionException as ex:
755
if conf.proxyList and not kb.threadException:
756
warnMsg = "unable to connect to the target URL ('%s')" % getSafeExString(ex)
757
logger.critical(warnMsg)
758
threadData.retriesCount = conf.retries
759
return Connect._retryProxy(**kwargs)
760
else:
761
raise
762
763
except _urllib.error.HTTPError as ex:
764
page = None
765
responseHeaders = None
766
767
if checking:
768
return None, None, None
769
770
try:
771
page = ex.read() if not skipRead else None
772
responseHeaders = ex.info()
773
responseHeaders[URI_HTTP_HEADER] = ex.geturl()
774
responseHeaders = patchHeaders(responseHeaders)
775
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE), percentDecode=not crawling)
776
except socket.timeout:
777
warnMsg = "connection timed out while trying "
778
warnMsg += "to get error page information (%d)" % ex.code
779
logger.warning(warnMsg)
780
return None, None, None
781
except KeyboardInterrupt:
782
raise
783
except:
784
pass
785
finally:
786
page = getUnicode(page)
787
788
code = ex.code
789
status = getUnicode(getattr(ex, "reason", None) or getSafeExString(ex).split(": ", 1)[-1])
790
791
kb.originalCode = kb.originalCode or code
792
threadData.lastHTTPError = (threadData.lastRequestUID, code, status)
793
kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1
794
795
responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, code, status)
796
797
if responseHeaders and getattr(responseHeaders, "headers", None):
798
logHeaders = "".join(getUnicode(responseHeaders.headers)).strip()
799
800
logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE]), start, time.time())
801
802
skipLogTraffic = True
803
804
if conf.verbose <= 5:
805
responseMsg += getUnicode(logHeaders)
806
elif conf.verbose > 5:
807
responseMsg += "%s\r\n\r\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE])
808
809
if not multipart:
810
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
811
812
if code in conf.abortCode:
813
errMsg = "aborting due to detected HTTP code '%d'" % code
814
singleTimeLogMessage(errMsg, logging.CRITICAL)
815
raise SystemExit
816
817
if ex.code not in (conf.ignoreCode or []):
818
if ex.code == _http_client.UNAUTHORIZED:
819
errMsg = "not authorized, try to provide right HTTP "
820
errMsg += "authentication type and valid credentials (%d). " % code
821
errMsg += "If this is intended, try to rerun by providing "
822
errMsg += "a valid value for option '--ignore-code'"
823
raise SqlmapConnectionException(errMsg)
824
elif chunked and ex.code in (_http_client.METHOD_NOT_ALLOWED, _http_client.LENGTH_REQUIRED):
825
warnMsg = "turning off HTTP chunked transfer encoding "
826
warnMsg += "as it seems that the target site doesn't support it (%d)" % code
827
singleTimeWarnMessage(warnMsg)
828
conf.chunked = kwargs["chunked"] = False
829
return Connect.getPage(**kwargs)
830
elif ex.code == _http_client.REQUEST_URI_TOO_LONG:
831
warnMsg = "request URI is marked as too long by the target. "
832
warnMsg += "you are advised to try a switch '--no-cast' and/or '--no-escape'"
833
singleTimeWarnMessage(warnMsg)
834
elif ex.code == _http_client.NOT_FOUND:
835
if raise404:
836
errMsg = "page not found (%d)" % code
837
raise SqlmapConnectionException(errMsg)
838
else:
839
debugMsg = "page not found (%d)" % code
840
singleTimeLogMessage(debugMsg, logging.DEBUG)
841
elif ex.code == _http_client.GATEWAY_TIMEOUT:
842
if ignoreTimeout:
843
return None if not conf.ignoreTimeouts else "", None, None
844
else:
845
warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, _http_client.responses[ex.code])
846
if threadData.retriesCount < conf.retries and not kb.threadException:
847
warnMsg += ". sqlmap is going to retry the request"
848
logger.critical(warnMsg)
849
return Connect._retryProxy(**kwargs)
850
elif kb.testMode:
851
logger.critical(warnMsg)
852
return None, None, None
853
else:
854
raise SqlmapConnectionException(warnMsg)
855
else:
856
debugMsg = "got HTTP error code: %d ('%s')" % (code, status)
857
logger.debug(debugMsg)
858
859
except (_urllib.error.URLError, socket.error, socket.timeout, _http_client.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError, ValueError, OverflowError, AttributeError, OSError, AssertionError, KeyError):
860
tbMsg = traceback.format_exc()
861
862
if conf.debug:
863
dataToStdout(tbMsg)
864
865
if checking:
866
return None, None, None
867
elif "KeyError:" in tbMsg:
868
if "content-length" in tbMsg:
869
return None, None, None
870
else:
871
raise
872
elif "AttributeError:" in tbMsg:
873
if "WSAECONNREFUSED" in tbMsg:
874
return None, None, None
875
else:
876
raise
877
elif "no host given" in tbMsg:
878
warnMsg = "invalid URL address used (%s)" % repr(url)
879
raise SqlmapSyntaxException(warnMsg)
880
elif any(_ in tbMsg for _ in ("forcibly closed", "Connection is already closed", "ConnectionAbortedError")):
881
warnMsg = "connection was forcibly closed by the target URL"
882
elif "timed out" in tbMsg:
883
if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
884
singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS) is dropping 'suspicious' requests")
885
kb.droppingRequests = True
886
warnMsg = "connection timed out to the target URL"
887
elif "Connection reset" in tbMsg:
888
if not conf.disablePrecon:
889
singleTimeWarnMessage("turning off pre-connect mechanism because of connection reset(s)")
890
conf.disablePrecon = True
891
892
if kb.testMode:
893
singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS) is resetting 'suspicious' requests")
894
kb.droppingRequests = True
895
warnMsg = "connection reset to the target URL"
896
elif "URLError" in tbMsg or "error" in tbMsg:
897
warnMsg = "unable to connect to the target URL"
898
match = re.search(r"Errno \d+\] ([^>\n]+)", tbMsg)
899
if match:
900
warnMsg += " ('%s')" % match.group(1).strip()
901
elif "NTLM" in tbMsg:
902
warnMsg = "there has been a problem with NTLM authentication"
903
elif "Invalid header name" in tbMsg: # (e.g. PostgreSQL ::Text payload)
904
return None, None, None
905
elif "BadStatusLine" in tbMsg:
906
warnMsg = "connection dropped or unknown HTTP "
907
warnMsg += "status code received"
908
if not conf.agent and not conf.randomAgent:
909
warnMsg += ". Try to force the HTTP User-Agent "
910
warnMsg += "header with option '--user-agent' or switch '--random-agent'"
911
elif "IncompleteRead" in tbMsg:
912
warnMsg = "there was an incomplete read error while retrieving data "
913
warnMsg += "from the target URL"
914
elif "Handshake status" in tbMsg:
915
status = re.search(r"Handshake status ([\d]{3})", tbMsg)
916
errMsg = "websocket handshake status %s" % status.group(1) if status else "unknown"
917
raise SqlmapConnectionException(errMsg)
918
elif "SqlmapCompressionException" in tbMsg:
919
warnMsg = "problems with response (de)compression"
920
retrying = True
921
else:
922
warnMsg = "unable to connect to the target URL"
923
924
if "BadStatusLine" not in tbMsg and any((conf.proxy, conf.tor)):
925
warnMsg += " or proxy"
926
927
if silent:
928
return None, None, None
929
930
with kb.locks.connError:
931
kb.connErrorCounter += 1
932
933
if kb.connErrorCounter >= MAX_CONSECUTIVE_CONNECTION_ERRORS and kb.choices.connError is None:
934
message = "there seems to be a continuous problem with connection to the target. "
935
message += "Are you sure that you want to continue? [y/N] "
936
937
kb.choices.connError = readInput(message, default='N', boolean=True)
938
939
if kb.choices.connError is False:
940
raise SqlmapSkipTargetException
941
942
if "forcibly closed" in tbMsg:
943
logger.critical(warnMsg)
944
return None, None, None
945
elif ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead", "Interrupted system call")):
946
return None if not conf.ignoreTimeouts else "", None, None
947
elif threadData.retriesCount < conf.retries and not kb.threadException:
948
warnMsg += ". sqlmap is going to retry the request"
949
if not retrying:
950
warnMsg += "(s)"
951
logger.critical(warnMsg)
952
else:
953
logger.debug(warnMsg)
954
return Connect._retryProxy(**kwargs)
955
elif kb.testMode or kb.multiThreadMode:
956
logger.critical(warnMsg)
957
return None, None, None
958
else:
959
raise SqlmapConnectionException(warnMsg)
960
961
finally:
962
for function in kb.postprocessFunctions:
963
try:
964
page, responseHeaders, code = function(page, responseHeaders, code)
965
except Exception as ex:
966
errMsg = "error occurred while running postprocess "
967
errMsg += "function '%s' ('%s')" % (function.__name__, getSafeExString(ex))
968
raise SqlmapGenericException(errMsg)
969
970
if isinstance(page, six.binary_type):
971
if HTTP_HEADER.CONTENT_TYPE in (responseHeaders or {}) and not re.search(TEXT_CONTENT_TYPE_REGEX, responseHeaders[HTTP_HEADER.CONTENT_TYPE]):
972
page = six.text_type(page, errors="ignore")
973
else:
974
page = getUnicode(page)
975
976
for _ in (getattr(conn, "redcode", None), code):
977
if _ is not None and _ in conf.abortCode:
978
errMsg = "aborting due to detected HTTP code '%d'" % _
979
singleTimeLogMessage(errMsg, logging.CRITICAL)
980
raise SystemExit
981
982
threadData.lastPage = page
983
threadData.lastCode = code
984
985
socket.setdefaulttimeout(conf.timeout)
986
987
# Dirty patch for Python3.11.0a7 (e.g. https://github.com/sqlmapproject/sqlmap/issues/5091)
988
if not sys.version.startswith("3.11."):
989
if conf.retryOn and re.search(conf.retryOn, page, re.I):
990
if threadData.retriesCount < conf.retries:
991
warnMsg = "forced retry of the request because of undesired page content"
992
logger.warning(warnMsg)
993
return Connect._retryProxy(**kwargs)
994
995
processResponse(page, responseHeaders, code, status)
996
997
if not skipLogTraffic:
998
if conn and getattr(conn, "redurl", None):
999
_ = _urllib.parse.urlsplit(conn.redurl)
1000
_ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else ""))
1001
requestMsg = re.sub(r"(\n[A-Z]+ ).+?( HTTP/\d)", r"\g<1>%s\g<2>" % getUnicode(_).replace("\\", "\\\\"), requestMsg, 1)
1002
1003
if kb.resendPostOnRedirect is False:
1004
requestMsg = re.sub(r"(\[#\d+\]:\n)POST ", r"\g<1>GET ", requestMsg)
1005
requestMsg = re.sub(r"(?i)Content-length: \d+\n", "", requestMsg)
1006
requestMsg = re.sub(r"(?s)\n\n.+", "\n", requestMsg)
1007
1008
responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, conn.code, status)
1009
elif "\n" not in responseMsg:
1010
responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, code, status)
1011
1012
if responseHeaders:
1013
logHeaders = "".join(getUnicode(responseHeaders.headers)).strip()
1014
1015
logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE]), start, time.time())
1016
1017
if conf.verbose <= 5:
1018
responseMsg += getUnicode(logHeaders)
1019
elif conf.verbose > 5:
1020
responseMsg += "%s\r\n\r\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE])
1021
1022
if not multipart:
1023
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
1024
1025
return page, responseHeaders, code
1026
1027
@staticmethod
1028
@stackedmethod
1029
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True, disableTampering=False, ignoreSecondOrder=False):
1030
"""
1031
This method calls a function to get the target URL page content
1032
and returns its page ratio (0 <= ratio <= 1) or a boolean value
1033
representing False/True match in case of !getRatioValue
1034
"""
1035
1036
if conf.direct:
1037
return direct(value, content)
1038
1039
get = None
1040
post = None
1041
cookie = None
1042
ua = None
1043
referer = None
1044
host = None
1045
page = None
1046
pageLength = None
1047
uri = None
1048
code = None
1049
1050
if not place:
1051
place = kb.injection.place or PLACE.GET
1052
1053
kb.place = place
1054
1055
if not auxHeaders:
1056
auxHeaders = {}
1057
1058
raise404 = place != PLACE.URI if raise404 is None else raise404
1059
method = method or conf.method
1060
1061
postUrlEncode = kb.postUrlEncode
1062
1063
value = agent.adjustLateValues(value)
1064
payload = agent.extractPayload(value)
1065
threadData = getCurrentThreadData()
1066
1067
if conf.httpHeaders:
1068
headers = OrderedDict(conf.httpHeaders)
1069
contentType = max(headers[_] or "" if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else "" for _ in headers) or None
1070
1071
if (kb.postHint or conf.skipUrlEncode) and postUrlEncode:
1072
postUrlEncode = False
1073
if not (conf.skipUrlEncode and contentType): # NOTE: https://github.com/sqlmapproject/sqlmap/issues/5092
1074
conf.httpHeaders = [_ for _ in conf.httpHeaders if _[1] != contentType]
1075
contentType = POST_HINT_CONTENT_TYPES.get(kb.postHint, PLAIN_TEXT_CONTENT_TYPE)
1076
conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType))
1077
if "urlencoded" in contentType:
1078
postUrlEncode = True
1079
1080
if payload:
1081
delimiter = conf.paramDel or (DEFAULT_GET_POST_DELIMITER if place != PLACE.COOKIE else DEFAULT_COOKIE_DELIMITER)
1082
1083
if not disableTampering and kb.tamperFunctions:
1084
for function in kb.tamperFunctions:
1085
hints = {}
1086
1087
try:
1088
payload = function(payload=payload, headers=auxHeaders, delimiter=delimiter, hints=hints)
1089
except Exception as ex:
1090
errMsg = "error occurred while running tamper "
1091
errMsg += "function '%s' ('%s')" % (function.__name__, getSafeExString(ex))
1092
raise SqlmapGenericException(errMsg)
1093
1094
if not isinstance(payload, six.string_types):
1095
errMsg = "tamper function '%s' returns " % function.__name__
1096
errMsg += "invalid payload type ('%s')" % type(payload)
1097
raise SqlmapValueException(errMsg)
1098
1099
value = agent.replacePayload(value, payload)
1100
1101
if hints:
1102
if HINT.APPEND in hints:
1103
value = "%s%s%s" % (value, delimiter, hints[HINT.APPEND])
1104
1105
if HINT.PREPEND in hints:
1106
if place == PLACE.URI:
1107
match = re.search(r"\w+\s*=\s*%s" % PAYLOAD_DELIMITER, value) or re.search(r"[^?%s/]=\s*%s" % (re.escape(delimiter), PAYLOAD_DELIMITER), value)
1108
if match:
1109
value = value.replace(match.group(0), "%s%s%s" % (hints[HINT.PREPEND], delimiter, match.group(0)))
1110
else:
1111
value = "%s%s%s" % (hints[HINT.PREPEND], delimiter, value)
1112
1113
logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload.replace('\\', BOUNDARY_BACKSLASH_MARKER)).replace(BOUNDARY_BACKSLASH_MARKER, '\\'))
1114
1115
if place == PLACE.CUSTOM_POST and kb.postHint:
1116
if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
1117
# payloads in SOAP/XML should have chars > and < replaced
1118
# with their HTML encoded counterparts
1119
payload = payload.replace("&#", SAFE_HEX_MARKER)
1120
payload = payload.replace('&', "&amp;").replace('>', "&gt;").replace('<', "&lt;").replace('"', "&quot;").replace("'", "&apos;") # Reference: https://stackoverflow.com/a/1091953
1121
payload = payload.replace(SAFE_HEX_MARKER, "&#")
1122
elif kb.postHint == POST_HINT.JSON:
1123
payload = escapeJsonValue(payload)
1124
elif kb.postHint == POST_HINT.JSON_LIKE:
1125
payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
1126
payload = escapeJsonValue(payload)
1127
payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
1128
value = agent.replacePayload(value, payload)
1129
else:
1130
# GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
1131
if (place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) or place == PLACE.CUSTOM_HEADER and value.split(',')[0].upper() == HTTP_HEADER.COOKIE.upper()) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and postUrlEncode:
1132
skip = False
1133
1134
if place == PLACE.COOKIE or place == PLACE.CUSTOM_HEADER and value.split(',')[0].upper() == HTTP_HEADER.COOKIE.upper():
1135
if kb.choices.cookieEncode is None:
1136
msg = "do you want to URL encode cookie values (implementation specific)? %s" % ("[Y/n]" if not conf.url.endswith(".aspx") else "[y/N]") # Reference: https://support.microsoft.com/en-us/kb/313282
1137
kb.choices.cookieEncode = readInput(msg, default='Y' if not conf.url.endswith(".aspx") else 'N', boolean=True)
1138
if not kb.choices.cookieEncode:
1139
skip = True
1140
1141
if not skip:
1142
if place in (PLACE.POST, PLACE.CUSTOM_POST): # potential problems in other cases (e.g. URL encoding of whole URI - including path)
1143
value = urlencode(value, spaceplus=kb.postSpaceToPlus)
1144
payload = urlencode(payload, safe='%', spaceplus=kb.postSpaceToPlus)
1145
value = agent.replacePayload(value, payload)
1146
postUrlEncode = False
1147
1148
if conf.hpp:
1149
if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_PLATFORM.ASP, WEB_PLATFORM.ASPX)):
1150
warnMsg = "HTTP parameter pollution should work only against "
1151
warnMsg += "ASP(.NET) targets"
1152
singleTimeWarnMessage(warnMsg)
1153
if place in (PLACE.GET, PLACE.POST):
1154
_ = re.escape(PAYLOAD_DELIMITER)
1155
match = re.search(r"(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value)
1156
if match:
1157
payload = match.group("value")
1158
1159
for splitter in (urlencode(' '), ' '):
1160
if splitter in payload:
1161
prefix, suffix = ("*/", "/*") if splitter == ' ' else (urlencode(_) for _ in ("*/", "/*"))
1162
parts = payload.split(splitter)
1163
parts[0] = "%s%s" % (parts[0], suffix)
1164
parts[-1] = "%s%s=%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[-1])
1165
for i in xrange(1, len(parts) - 1):
1166
parts[i] = "%s%s=%s%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[i], suffix)
1167
payload = "".join(parts)
1168
1169
for splitter in (urlencode(','), ','):
1170
payload = payload.replace(splitter, "%s%s=" % (DEFAULT_GET_POST_DELIMITER, match.group("name")))
1171
1172
value = agent.replacePayload(value, payload)
1173
else:
1174
warnMsg = "HTTP parameter pollution works only with regular "
1175
warnMsg += "GET and POST parameters"
1176
singleTimeWarnMessage(warnMsg)
1177
1178
if place:
1179
value = agent.removePayloadDelimiters(value)
1180
1181
if PLACE.GET in conf.parameters:
1182
get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value
1183
elif place == PLACE.GET: # Note: for (e.g.) checkWaf() when there are no GET parameters
1184
get = value
1185
1186
if PLACE.POST in conf.parameters:
1187
post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value
1188
elif place == PLACE.POST:
1189
post = value
1190
1191
if PLACE.CUSTOM_POST in conf.parameters:
1192
post = conf.parameters[PLACE.CUSTOM_POST].replace(kb.customInjectionMark, "") if place != PLACE.CUSTOM_POST or not value else value
1193
post = post.replace(ASTERISK_MARKER, '*') if post else post
1194
1195
if PLACE.COOKIE in conf.parameters:
1196
cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value
1197
1198
if PLACE.USER_AGENT in conf.parameters:
1199
ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value
1200
1201
if PLACE.REFERER in conf.parameters:
1202
referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value
1203
1204
if PLACE.HOST in conf.parameters:
1205
host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value
1206
1207
if PLACE.URI in conf.parameters:
1208
uri = conf.url if place != PLACE.URI or not value else value
1209
else:
1210
uri = conf.url
1211
1212
if value and place == PLACE.CUSTOM_HEADER:
1213
if value.split(',')[0].capitalize() == PLACE.COOKIE:
1214
cookie = value.split(',', 1)[-1]
1215
else:
1216
auxHeaders[value.split(',')[0]] = value.split(',', 1)[-1]
1217
1218
if conf.csrfToken:
1219
token = AttribDict()
1220
1221
def _adjustParameter(paramString, parameter, newValue):
    """
    Returns a copy of paramString where the value of the given parameter
    is replaced with newValue (used for refreshing anti-CSRF token values)
    """

    # Prefer the URL encoded form of the parameter name when that is how
    # it actually appears inside the parameter string
    if urlencode(parameter) in paramString:
        parameter = urlencode(parameter)

    # Case 1: regular "name=value" pair (e.g. query string or POST body)
    pair = re.search(r"%s=[^&]*" % re.escape(parameter), paramString, re.I)
    if pair:
        replacement = ("%s=%s" % (parameter, newValue)).replace('\\', r'\\')
        return re.sub(r"(?i)%s" % re.escape(pair.group(0)), replacement, paramString)

    # Case 2: JSON-like '"name": "value"' pair
    pair = re.search(r"(%s[\"']\s*:\s*[\"'])([^\"']*)" % re.escape(parameter), paramString, re.I)
    if pair:
        return re.sub(r"(?i)%s" % re.escape(pair.group(0)), "%s%s" % (pair.group(1), newValue), paramString)

    # No recognizable occurrence - leave the string untouched
    return paramString
1236
1237
for attempt in xrange(conf.csrfRetries + 1):
1238
if token:
1239
break
1240
1241
if attempt > 0:
1242
warnMsg = "unable to find anti-CSRF token '%s' at '%s'" % (conf.csrfToken._original, conf.csrfUrl or conf.url)
1243
warnMsg += ". sqlmap is going to retry the request"
1244
logger.warning(warnMsg)
1245
1246
page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, post=conf.csrfData or (conf.data if conf.csrfUrl == conf.url and (conf.csrfMethod or "").upper() == HTTPMETHOD.POST else None), method=conf.csrfMethod or (conf.method if conf.csrfUrl == conf.url else None), cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
1247
page = urldecode(page) # for anti-CSRF tokens with special characters in their name (e.g. 'foo:bar=...')
1248
1249
match = re.search(r"(?i)<input[^>]+\bname=[\"']?(?P<name>%s)\b[^>]*\bvalue=[\"']?(?P<value>[^>'\"]*)" % conf.csrfToken, page or "", re.I)
1250
1251
if not match:
1252
match = re.search(r"(?i)<input[^>]+\bvalue=[\"']?(?P<value>[^>'\"]*)[\"']?[^>]*\bname=[\"']?(?P<name>%s)\b" % conf.csrfToken, page or "", re.I)
1253
1254
if not match:
1255
match = re.search(r"(?P<name>%s)[\"']:[\"'](?P<value>[^\"']+)" % conf.csrfToken, page or "", re.I)
1256
1257
if not match:
1258
match = re.search(r"\b(?P<name>%s)\s*[:=]\s*(?P<value>\w+)" % conf.csrfToken, getUnicode(headers), re.I)
1259
1260
if not match:
1261
match = re.search(r"\b(?P<name>%s)\s*=\s*['\"]?(?P<value>[^;'\"]+)" % conf.csrfToken, page or "", re.I)
1262
1263
if not match:
1264
match = re.search(r"<meta\s+name=[\"']?(?P<name>%s)[\"']?[^>]+\b(value|content)=[\"']?(?P<value>[^>\"']+)" % conf.csrfToken, page or "", re.I)
1265
1266
if match:
1267
token.name, token.value = match.group("name"), match.group("value")
1268
1269
match = re.search(r"String\.fromCharCode\(([\d+, ]+)\)", token.value)
1270
if match:
1271
token.value = "".join(_unichr(int(_)) for _ in match.group(1).replace(' ', "").split(','))
1272
1273
if not token:
1274
if conf.csrfUrl and conf.csrfToken and conf.csrfUrl != conf.url and code == _http_client.OK:
1275
if headers and PLAIN_TEXT_CONTENT_TYPE in headers.get(HTTP_HEADER.CONTENT_TYPE, ""):
1276
token.name = conf.csrfToken
1277
token.value = page
1278
1279
if not token and conf.cj and any(re.search(conf.csrfToken, _.name, re.I) for _ in conf.cj):
1280
for _ in conf.cj:
1281
if re.search(conf.csrfToken, _.name, re.I):
1282
token.name, token.value = _.name, _.value
1283
if not any(re.search(conf.csrfToken, ' '.join(_), re.I) for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
1284
if post:
1285
post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, token.name, token.value)
1286
elif get:
1287
get = "%s%s%s=%s" % (get, conf.paramDel or DEFAULT_GET_POST_DELIMITER, token.name, token.value)
1288
else:
1289
get = "%s=%s" % (token.name, token.value)
1290
break
1291
1292
if not token:
1293
errMsg = "anti-CSRF token '%s' can't be found at '%s'" % (conf.csrfToken._original, conf.csrfUrl or conf.url)
1294
if not conf.csrfUrl:
1295
errMsg += ". You can try to rerun by providing "
1296
errMsg += "a valid value for option '--csrf-url'"
1297
raise SqlmapTokenException(errMsg)
1298
1299
if token:
1300
token.value = token.value.strip("'\"")
1301
1302
for candidate in (PLACE.GET, PLACE.POST, PLACE.CUSTOM_POST, PLACE.URI):
1303
if candidate in conf.parameters:
1304
if candidate == PLACE.URI and uri:
1305
uri = _adjustParameter(uri, token.name, token.value)
1306
elif candidate == PLACE.GET and get:
1307
get = _adjustParameter(get, token.name, token.value)
1308
elif candidate in (PLACE.POST, PLACE.CUSTOM_POST) and post:
1309
post = _adjustParameter(post, token.name, token.value)
1310
1311
for i in xrange(len(conf.httpHeaders)):
1312
if conf.httpHeaders[i][0].lower() == token.name.lower():
1313
conf.httpHeaders[i] = (conf.httpHeaders[i][0], token.value)
1314
1315
if conf.rParam:
1316
def _randomizeParameter(paramString, randomParameter):
    """
    Returns a copy of paramString where the value of randomParameter
    is substituted with a randomized one (option '--randomize')
    """

    def _pick(current):
        # Either draw a replacement from the user supplied pool or
        # derive a fresh randomized value from the current one
        if randomParameter in kb.randomPool:
            return random.sample(kb.randomPool[randomParameter], 1)[0]
        else:
            return randomizeParameterValue(current)

    # Regular "name=value" style occurrence
    found = re.search(r"(\A|\b)%s=(?P<value>[^&;]*)" % re.escape(randomParameter), paramString)
    if found:
        return re.sub(r"(\A|\b)%s=[^&;]*" % re.escape(randomParameter), "%s=%s" % (randomParameter, _pick(found.group("value"))), paramString)

    # Non-standard occurrence (e.g. JSON-like 'name": "value"')
    found = re.search(r"(\A|\b)(%s\b[^\w]+)(?P<value>\w+)" % re.escape(randomParameter), paramString)
    if found:
        return paramString.replace(found.group(0), "%s%s" % (found.group(2), _pick(found.group("value"))))

    return paramString
1330
1331
for randomParameter in conf.rParam:
1332
for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE, PLACE.URI, PLACE.CUSTOM_POST):
1333
if item in conf.parameters:
1334
if item == PLACE.GET and get:
1335
get = _randomizeParameter(get, randomParameter)
1336
elif item in (PLACE.POST, PLACE.CUSTOM_POST) and post:
1337
post = _randomizeParameter(post, randomParameter)
1338
elif item == PLACE.COOKIE and cookie:
1339
cookie = _randomizeParameter(cookie, randomParameter)
1340
elif item == PLACE.URI and uri:
1341
uri = _randomizeParameter(uri, randomParameter)
1342
1343
if conf.evalCode:
1344
delimiter = conf.paramDel or DEFAULT_GET_POST_DELIMITER
1345
variables = {"uri": uri, "lastPage": threadData.lastPage, "_locals": locals(), "cookie": cookie}
1346
originals = {}
1347
1348
if not get and PLACE.URI in conf.parameters:
1349
query = _urllib.parse.urlsplit(uri).query or ""
1350
else:
1351
query = None
1352
1353
for item in filterNone((get, post if not kb.postHint else None, query)):
1354
for part in item.split(delimiter):
1355
if '=' in part:
1356
name, value = part.split('=', 1)
1357
name = name.strip()
1358
if safeVariableNaming(name) != name:
1359
conf.evalCode = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), conf.evalCode)
1360
name = safeVariableNaming(name)
1361
value = urldecode(value, convall=True, spaceplus=(item == post and kb.postSpaceToPlus))
1362
variables[name] = value
1363
1364
if post and kb.postHint in (POST_HINT.JSON, POST_HINT.JSON_LIKE):
1365
for name, value in (parseJson(post) or {}).items():
1366
if safeVariableNaming(name) != name:
1367
conf.evalCode = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), conf.evalCode)
1368
name = safeVariableNaming(name)
1369
variables[name] = value
1370
1371
if cookie:
1372
for part in cookie.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER):
1373
if '=' in part:
1374
name, value = part.split('=', 1)
1375
name = name.strip()
1376
if safeVariableNaming(name) != name:
1377
conf.evalCode = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), conf.evalCode)
1378
name = safeVariableNaming(name)
1379
value = urldecode(value, convall=True)
1380
variables[name] = value
1381
1382
while True:
1383
try:
1384
compile(getBytes(re.sub(r"\s*;\s*", "\n", conf.evalCode)), "", "exec")
1385
except SyntaxError as ex:
1386
if ex.text:
1387
original = replacement = getUnicode(ex.text.strip())
1388
1389
if '=' in original:
1390
name, value = original.split('=', 1)
1391
name = name.strip()
1392
if safeVariableNaming(name) != name:
1393
replacement = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), replacement)
1394
else:
1395
for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
1396
if safeVariableNaming(_) != _:
1397
replacement = replacement.replace(_, safeVariableNaming(_))
1398
break
1399
1400
if original == replacement:
1401
conf.evalCode = conf.evalCode.replace(EVALCODE_ENCODED_PREFIX, "")
1402
break
1403
else:
1404
conf.evalCode = conf.evalCode.replace(getUnicode(ex.text.strip(), UNICODE_ENCODING), replacement)
1405
else:
1406
break
1407
else:
1408
break
1409
1410
originals.update(variables)
1411
evaluateCode(conf.evalCode, variables)
1412
1413
for variable in list(variables.keys()):
1414
if unsafeVariableNaming(variable) != variable:
1415
entry = variables[variable]
1416
del variables[variable]
1417
variables[unsafeVariableNaming(variable)] = entry
1418
1419
uri = variables["uri"]
1420
cookie = variables["cookie"]
1421
1422
for name, entry in variables.items():
1423
if name != "__builtins__" and originals.get(name, "") != entry:
1424
if isinstance(entry, (int, float, six.string_types, six.binary_type)):
1425
found = False
1426
entry = getUnicode(entry, UNICODE_ENCODING)
1427
1428
if kb.postHint == POST_HINT.MULTIPART:
1429
boundary = "--%s" % re.search(r"boundary=([^\s]+)", contentType).group(1)
1430
if boundary:
1431
parts = post.split(boundary)
1432
match = re.search(r'\bname="%s"' % re.escape(name), post)
1433
if not match and parts:
1434
parts.insert(2, parts[1])
1435
parts[2] = re.sub(r'\bname="[^"]+".*', 'name="%s"' % re.escape(name), parts[2])
1436
for i in xrange(len(parts)):
1437
part = parts[i]
1438
if re.search(r'\bname="%s"' % re.escape(name), part):
1439
match = re.search(r"(?s)\A.+?\r?\n\r?\n", part)
1440
if match:
1441
found = True
1442
first = match.group(0)
1443
second = part[len(first):]
1444
second = re.sub(r"(?s).+?(\r?\n?\-*\Z)", r"%s\g<1>" % re.escape(entry), second)
1445
parts[i] = "%s%s" % (first, second)
1446
post = boundary.join(parts)
1447
1448
elif kb.postHint and re.search(r"\b%s\b" % re.escape(name), post or ""):
1449
if kb.postHint in (POST_HINT.XML, POST_HINT.SOAP):
1450
if re.search(r"<%s\b" % re.escape(name), post):
1451
found = True
1452
post = re.sub(r"(?s)(<%s\b[^>]*>)(.*?)(</%s)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), post)
1453
elif re.search(r"\b%s>" % re.escape(name), post):
1454
found = True
1455
post = re.sub(r"(?s)(\b%s>)(.*?)(</[^<]*\b%s>)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), post)
1456
1457
elif kb.postHint in (POST_HINT.JSON, POST_HINT.JSON_LIKE):
1458
match = re.search(r"['\"]%s['\"]:" % re.escape(name), post)
1459
if match:
1460
quote = match.group(0)[0]
1461
post = post.replace("\\%s" % quote, BOUNDARY_BACKSLASH_MARKER)
1462
match = re.search(r"(%s%s%s:\s*)(\d+|%s[^%s]*%s)" % (quote, re.escape(name), quote, quote, quote, quote), post)
1463
if match:
1464
found = True
1465
post = post.replace(match.group(0), "%s%s" % (match.group(1), entry if entry.isdigit() else "%s%s%s" % (match.group(0)[0], entry, match.group(0)[0])))
1466
post = post.replace(BOUNDARY_BACKSLASH_MARKER, "\\%s" % quote)
1467
1468
regex = r"\b(%s)\b([^\w]+)(\w+)" % re.escape(name)
1469
if not found and re.search(regex, (post or "")):
1470
found = True
1471
post = re.sub(regex, r"\g<1>\g<2>%s" % entry.replace('\\', r'\\'), post)
1472
1473
regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
1474
if not found and re.search(regex, (post or "")):
1475
found = True
1476
post = re.sub(regex, r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), post)
1477
1478
if re.search(regex, (get or "")):
1479
found = True
1480
get = re.sub(regex, r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), get)
1481
1482
if re.search(regex, (query or "")):
1483
found = True
1484
uri = re.sub(regex.replace(r"\A", r"\?"), r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), uri)
1485
1486
regex = r"((\A|%s\s*)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), re.escape(name), re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER))
1487
if re.search(regex, (cookie or "")):
1488
found = True
1489
cookie = re.sub(regex, r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), cookie)
1490
1491
if not found:
1492
if post is not None:
1493
if kb.postHint in (POST_HINT.JSON, POST_HINT.JSON_LIKE):
1494
match = re.search(r"['\"]", post)
1495
if match:
1496
quote = match.group(0)
1497
post = re.sub(r"\}\Z", "%s%s}" % (',' if re.search(r"\w", post) else "", "%s%s%s:%s" % (quote, name, quote, entry if entry.isdigit() else "%s%s%s" % (quote, entry, quote))), post)
1498
else:
1499
post += "%s%s=%s" % (delimiter, name, entry)
1500
elif get is not None:
1501
get += "%s%s=%s" % (delimiter, name, entry)
1502
elif cookie is not None:
1503
cookie += "%s%s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, entry)
1504
1505
if not conf.skipUrlEncode:
1506
get = urlencode(get, limit=True)
1507
1508
if post is not None:
1509
if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE):
1510
post = getattr(post, UNENCODED_ORIGINAL_VALUE)
1511
elif postUrlEncode:
1512
post = urlencode(post, spaceplus=kb.postSpaceToPlus)
1513
1514
if timeBasedCompare and not conf.disableStats:
1515
if len(kb.responseTimes.get(kb.responseTimeMode, [])) < MIN_TIME_RESPONSES:
1516
clearConsoleLine()
1517
1518
kb.responseTimes.setdefault(kb.responseTimeMode, [])
1519
1520
if conf.tor:
1521
warnMsg = "it's highly recommended to avoid usage of switch '--tor' for "
1522
warnMsg += "time-based injections because of inherent high latency time"
1523
singleTimeWarnMessage(warnMsg)
1524
1525
warnMsg = "[%s] [WARNING] %stime-based comparison requires " % (time.strftime("%X"), "(case) " if kb.responseTimeMode else "")
1526
warnMsg += "%s statistical model, please wait" % ("larger" if len(kb.responseTimes) == 1 else "reset of")
1527
dataToStdout(warnMsg)
1528
1529
while len(kb.responseTimes[kb.responseTimeMode]) < MIN_TIME_RESPONSES:
1530
_ = kb.responseTimePayload.replace(RANDOM_INTEGER_MARKER, str(randomInt(6))).replace(RANDOM_STRING_MARKER, randomStr()) if kb.responseTimePayload else kb.responseTimePayload
1531
Connect.queryPage(value=_, content=True, raise404=False)
1532
dataToStdout('.')
1533
1534
dataToStdout(" (done)\n")
1535
1536
elif not kb.testMode:
1537
warnMsg = "it is very important to not stress the network connection "
1538
warnMsg += "during usage of time-based payloads to prevent potential "
1539
warnMsg += "disruptions "
1540
singleTimeWarnMessage(warnMsg)
1541
1542
if not kb.laggingChecked:
1543
kb.laggingChecked = True
1544
1545
deviation = stdev(kb.responseTimes[kb.responseTimeMode])
1546
1547
if deviation is not None and deviation > WARN_TIME_STDEV:
1548
kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE
1549
1550
warnMsg = "considerable lagging has been detected "
1551
warnMsg += "in connection response(s). Please use as high "
1552
warnMsg += "value for option '--time-sec' as possible (e.g. "
1553
warnMsg += "10 or more)"
1554
logger.critical(warnMsg)
1555
1556
if (conf.safeFreq or 0) > 0:
1557
kb.queryCounter += 1
1558
if kb.queryCounter % conf.safeFreq == 0:
1559
if conf.safeUrl:
1560
Connect.getPage(url=conf.safeUrl, post=conf.safePost, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)
1561
elif kb.safeReq:
1562
Connect.getPage(url=kb.safeReq.url, post=kb.safeReq.post, method=kb.safeReq.method, auxHeaders=kb.safeReq.headers)
1563
1564
start = time.time()
1565
1566
if kb.nullConnection and not content and not response and not timeBasedCompare:
1567
noteResponseTime = False
1568
1569
try:
1570
pushValue(kb.pageCompress)
1571
kb.pageCompress = False
1572
1573
if kb.nullConnection == NULLCONNECTION.HEAD:
1574
method = HTTPMETHOD.HEAD
1575
elif kb.nullConnection == NULLCONNECTION.RANGE:
1576
auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1"
1577
1578
_, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))
1579
1580
if headers:
1581
try:
1582
if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and headers.get(HTTP_HEADER.CONTENT_LENGTH):
1583
pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH].split(',')[0])
1584
elif kb.nullConnection == NULLCONNECTION.RANGE and headers.get(HTTP_HEADER.CONTENT_RANGE):
1585
pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])
1586
except ValueError:
1587
pass
1588
finally:
1589
kb.pageCompress = popValue()
1590
1591
if pageLength is None:
1592
try:
1593
page, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
1594
except MemoryError:
1595
page, headers, code = None, None, None
1596
warnMsg = "site returned insanely large response"
1597
if kb.testMode:
1598
warnMsg += " in testing phase. This is a common "
1599
warnMsg += "behavior in custom WAF/IPS solutions"
1600
singleTimeWarnMessage(warnMsg)
1601
1602
if not ignoreSecondOrder:
1603
if conf.secondUrl:
1604
page, headers, code = Connect.getPage(url=conf.secondUrl, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
1605
elif kb.secondReq and IPS_WAF_CHECK_PAYLOAD not in _urllib.parse.unquote(value or ""):
1606
def _(value):
1607
if kb.customInjectionMark in (value or ""):
1608
if payload is None:
1609
value = value.replace(kb.customInjectionMark, "")
1610
else:
1611
try:
1612
value = re.sub(r"\w*%s" % re.escape(kb.customInjectionMark), payload, value)
1613
except re.error:
1614
value = re.sub(r"\w*%s" % re.escape(kb.customInjectionMark), re.escape(payload), value)
1615
return value
1616
page, headers, code = Connect.getPage(url=_(kb.secondReq[0]), post=_(kb.secondReq[2]), method=kb.secondReq[1], cookie=kb.secondReq[3], silent=silent, auxHeaders=dict(auxHeaders, **dict(kb.secondReq[4])), response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
1617
1618
threadData.lastQueryDuration = calculateDeltaSeconds(start)
1619
1620
kb.originalCode = code if kb.originalCode is None else kb.originalCode
1621
kb.originalPage = page if kb.originalPage is None else kb.originalPage
1622
1623
if kb.testMode:
1624
kb.testQueryCount += 1
1625
1626
if timeBasedCompare:
1627
return wasLastResponseDelayed()
1628
elif noteResponseTime:
1629
kb.responseTimes.setdefault(kb.responseTimeMode, [])
1630
kb.responseTimes[kb.responseTimeMode].append(threadData.lastQueryDuration)
1631
if len(kb.responseTimes[kb.responseTimeMode]) > MAX_TIME_RESPONSES:
1632
kb.responseTimes[kb.responseTimeMode] = kb.responseTimes[kb.responseTimeMode][-MAX_TIME_RESPONSES // 2:]
1633
1634
if not response and removeReflection:
1635
page = removeReflectiveValues(page, payload)
1636
1637
kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None
1638
1639
message = extractRegexResult(PERMISSION_DENIED_REGEX, page or "", re.I)
1640
if message:
1641
kb.permissionFlag = True
1642
singleTimeWarnMessage("potential permission problems detected ('%s')" % message)
1643
1644
headers = patchHeaders(headers)
1645
1646
if content or response:
1647
return page, headers, code
1648
1649
if getRatioValue:
1650
return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
1651
else:
1652
return comparison(page, headers, code, getRatioValue, pageLength)
1653
1654
def setHTTPHandlers():  # Cross-referenced function
    """Stub for a cross-referenced function; the real implementation is
    injected at runtime from elsewhere in the project. Calling the stub
    directly always raises NotImplementedError."""
    raise NotImplementedError
1656
1657