Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
sqlmapproject
GitHub Repository: sqlmapproject/sqlmap
Path: blob/master/lib/request/connect.py
3554 views
1
#!/usr/bin/env python
2
3
"""
4
Copyright (c) 2006-2026 sqlmap developers (https://sqlmap.org)
5
See the file 'LICENSE' for copying permission
6
"""
7
8
import binascii
9
import inspect
10
import logging
11
import os
12
import random
13
import re
14
import socket
15
import string
16
import struct
17
import sys
18
import time
19
import traceback
20
21
try:
22
import websocket
23
from websocket import WebSocketException
24
except ImportError:
25
class WebSocketException(Exception):
26
pass
27
28
from lib.core.agent import agent
29
from lib.core.common import asciifyUrl
30
from lib.core.common import calculateDeltaSeconds
31
from lib.core.common import checkFile
32
from lib.core.common import checkSameHost
33
from lib.core.common import chunkSplitPostData
34
from lib.core.common import clearConsoleLine
35
from lib.core.common import dataToStdout
36
from lib.core.common import escapeJsonValue
37
from lib.core.common import evaluateCode
38
from lib.core.common import extractRegexResult
39
from lib.core.common import filterNone
40
from lib.core.common import findMultipartPostBoundary
41
from lib.core.common import getCurrentThreadData
42
from lib.core.common import getHeader
43
from lib.core.common import getHostHeader
44
from lib.core.common import getRequestHeader
45
from lib.core.common import getSafeExString
46
from lib.core.common import logHTTPTraffic
47
from lib.core.common import openFile
48
from lib.core.common import popValue
49
from lib.core.common import parseJson
50
from lib.core.common import pushValue
51
from lib.core.common import randomizeParameterValue
52
from lib.core.common import randomInt
53
from lib.core.common import randomStr
54
from lib.core.common import readInput
55
from lib.core.common import removeReflectiveValues
56
from lib.core.common import safeVariableNaming
57
from lib.core.common import singleTimeLogMessage
58
from lib.core.common import singleTimeWarnMessage
59
from lib.core.common import stdev
60
from lib.core.common import unArrayizeValue
61
from lib.core.common import unsafeVariableNaming
62
from lib.core.common import urldecode
63
from lib.core.common import urlencode
64
from lib.core.common import wasLastResponseDelayed
65
from lib.core.compat import LooseVersion
66
from lib.core.compat import patchHeaders
67
from lib.core.compat import xrange
68
from lib.core.convert import encodeBase64
69
from lib.core.convert import getBytes
70
from lib.core.convert import getText
71
from lib.core.convert import getUnicode
72
from lib.core.data import cmdLineOptions
73
from lib.core.data import conf
74
from lib.core.data import kb
75
from lib.core.data import logger
76
from lib.core.datatype import AttribDict
77
from lib.core.decorators import stackedmethod
78
from lib.core.dicts import POST_HINT_CONTENT_TYPES
79
from lib.core.enums import ADJUST_TIME_DELAY
80
from lib.core.enums import AUTH_TYPE
81
from lib.core.enums import CUSTOM_LOGGING
82
from lib.core.enums import HINT
83
from lib.core.enums import HTTP_HEADER
84
from lib.core.enums import HTTPMETHOD
85
from lib.core.enums import NULLCONNECTION
86
from lib.core.enums import PAYLOAD
87
from lib.core.enums import PLACE
88
from lib.core.enums import POST_HINT
89
from lib.core.enums import REDIRECTION
90
from lib.core.enums import WEB_PLATFORM
91
from lib.core.exception import SqlmapCompressionException
92
from lib.core.exception import SqlmapConnectionException
93
from lib.core.exception import SqlmapGenericException
94
from lib.core.exception import SqlmapMissingDependence
95
from lib.core.exception import SqlmapSkipTargetException
96
from lib.core.exception import SqlmapSyntaxException
97
from lib.core.exception import SqlmapTokenException
98
from lib.core.exception import SqlmapValueException
99
from lib.core.settings import ASTERISK_MARKER
100
from lib.core.settings import BOUNDARY_BACKSLASH_MARKER
101
from lib.core.settings import DEFAULT_CONTENT_TYPE
102
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
103
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
104
from lib.core.settings import DEFAULT_USER_AGENT
105
from lib.core.settings import EVALCODE_ENCODED_PREFIX
106
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
107
from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
108
from lib.core.settings import IPS_WAF_CHECK_PAYLOAD
109
from lib.core.settings import IS_WIN
110
from lib.core.settings import JAVASCRIPT_HREF_REGEX
111
from lib.core.settings import LARGE_READ_TRIM_MARKER
112
from lib.core.settings import LIVE_COOKIES_TIMEOUT
113
from lib.core.settings import MIN_HTTPX_VERSION
114
from lib.core.settings import MAX_CONNECTION_READ_SIZE
115
from lib.core.settings import MAX_CONNECTIONS_REGEX
116
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
117
from lib.core.settings import MAX_CONSECUTIVE_CONNECTION_ERRORS
118
from lib.core.settings import MAX_MURPHY_SLEEP_TIME
119
from lib.core.settings import META_REFRESH_REGEX
120
from lib.core.settings import MAX_TIME_RESPONSES
121
from lib.core.settings import MIN_TIME_RESPONSES
122
from lib.core.settings import PAYLOAD_DELIMITER
123
from lib.core.settings import PERMISSION_DENIED_REGEX
124
from lib.core.settings import PLAIN_TEXT_CONTENT_TYPE
125
from lib.core.settings import RANDOM_INTEGER_MARKER
126
from lib.core.settings import RANDOM_STRING_MARKER
127
from lib.core.settings import REPLACEMENT_MARKER
128
from lib.core.settings import SAFE_HEX_MARKER
129
from lib.core.settings import TEXT_CONTENT_TYPE_REGEX
130
from lib.core.settings import UNENCODED_ORIGINAL_VALUE
131
from lib.core.settings import UNICODE_ENCODING
132
from lib.core.settings import URI_HTTP_HEADER
133
from lib.core.settings import WARN_TIME_STDEV
134
from lib.core.settings import WEBSOCKET_INITIAL_TIMEOUT
135
from lib.core.settings import YUGE_FACTOR
136
from lib.request.basic import decodePage
137
from lib.request.basic import forgeHeaders
138
from lib.request.basic import processResponse
139
from lib.request.comparison import comparison
140
from lib.request.direct import direct
141
from lib.request.methodrequest import MethodRequest
142
from lib.utils.safe2bin import safecharencode
143
from thirdparty import six
144
from thirdparty.odict import OrderedDict
145
from thirdparty.six import unichr as _unichr
146
from thirdparty.six.moves import http_client as _http_client
147
from thirdparty.six.moves import urllib as _urllib
148
from thirdparty.socks.socks import ProxyError
149
150
class Connect(object):
151
"""
152
This class defines methods used to perform HTTP requests
153
"""
154
155
@staticmethod
def _getPageProxy(**kwargs):
    """
    Guarded entry point for page retrieval: aborts early when the call
    stack has grown suspiciously deep (e.g. endless redirect/refresh
    loops) and degrades a RuntimeError into an empty result triple.
    """

    # Note: https://github.com/sqlmapproject/sqlmap/issues/4525
    try:
        depth = len(inspect.stack())
        if depth > sys.getrecursionlimit() // 2:
            raise SqlmapConnectionException("unable to connect to the target URL")
    except (TypeError, UnicodeError):
        # inspect.stack() itself can misbehave in exotic environments;
        # in that case simply skip the depth guard
        pass

    try:
        return Connect.getPage(**kwargs)
    except RuntimeError:
        return None, None, None
168
169
@staticmethod
def _retryProxy(**kwargs):
    # Retry a failed request: bump the per-thread retry counter, rotate the
    # proxy when the retry budget is spent, emit one-time hints about the
    # likeliest cause of the failure, then re-issue the original request.
    threadData = getCurrentThreadData()
    threadData.retriesCount += 1

    # When working from a proxy list, switch to a fresh proxy after the
    # configured number of retries (skipped while another thread holds the
    # handlers lock and is already re-configuring them)
    if conf.proxyList and threadData.retriesCount >= conf.retries and not kb.locks.handlers.locked():
        warnMsg = "changing proxy"
        logger.warning(warnMsg)

        conf.proxy = None
        threadData.retriesCount = 0

        setHTTPHandlers()

    if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
        # timed based payloads can cause web server unresponsiveness
        # if the injectable piece of code is some kind of JOIN-like query
        warnMsg = "most likely web server instance hasn't recovered yet "
        warnMsg += "from previous timed based payload. If the problem "
        warnMsg += "persists please wait for a few minutes and rerun "
        warnMsg += "without flag 'T' in option '--technique' "
        warnMsg += "(e.g. '--flush-session --technique=BEUS') or try to "
        warnMsg += "lower the value of option '--time-sec' (e.g. '--time-sec=2')"
        singleTimeWarnMessage(warnMsg)

    elif kb.originalPage is None:
        # Target never answered successfully yet - suggest connectivity fixes
        if conf.tor:
            warnMsg = "please make sure that you have "
            warnMsg += "Tor installed and running so "
            warnMsg += "you could successfully use "
            warnMsg += "switch '--tor' "
            if IS_WIN:
                warnMsg += "(e.g. 'https://www.torproject.org/download/')"
            else:
                warnMsg += "(e.g. 'https://help.ubuntu.com/community/Tor')"
        else:
            warnMsg = "if the problem persists please check that the provided "
            warnMsg += "target URL is reachable"

        # Offer switches that commonly work around request blocking
        items = []
        if not conf.randomAgent:
            items.append("switch '--random-agent'")
        if not any((conf.proxy, conf.proxyFile, conf.tor)):
            items.append("proxy switches ('--proxy', '--proxy-file'...)")
        if items:
            warnMsg += ". In case that it is, "
            warnMsg += "you can try to rerun with "
            warnMsg += " and/or ".join(items)

        singleTimeWarnMessage(warnMsg)

    elif conf.threads > 1:
        # Heavy threading can overwhelm fragile targets
        warnMsg = "if the problem persists please try to lower "
        warnMsg += "the number of used threads (option '--threads')"
        singleTimeWarnMessage(warnMsg)

    # Mark the request as a retry and go back through the guarded entry point
    kwargs['retrying'] = True
    return Connect._getPageProxy(**kwargs)
227
228
@staticmethod
def _connReadProxy(conn):
    """
    Reads the whole response body from a connection-like object as raw
    bytes, while guarding against overly large (and/or compressed)
    payloads by warning, trimming, or bailing out.
    """

    chunks = []

    if not kb.dnsMode and conn:
        headers = conn.info()
        # Compressed (or plainly non-text) content cannot be trimmed
        # chunk-wise, hence it is read in one single capped gulp
        compressible = kb.pageCompress and headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate") or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower())

        if compressible:
            chunk = conn.read(MAX_CONNECTION_TOTAL_SIZE)
            if len(chunk) == MAX_CONNECTION_TOTAL_SIZE:
                warnMsg = "large compressed response detected. Disabling compression"
                singleTimeWarnMessage(warnMsg)
                kb.pageCompress = False
                raise SqlmapCompressionException
            chunks.append(chunk)
        else:
            # Piecewise read so that huge responses can be trimmed on the fly
            while conn:
                try:
                    chunk = conn.read(MAX_CONNECTION_READ_SIZE)
                except AssertionError:
                    chunk = b""

                if len(chunk) != MAX_CONNECTION_READ_SIZE:
                    # Short read - the body has been exhausted
                    chunks.append(chunk)
                    break

                warnMsg = "large response detected. This could take a while"
                singleTimeWarnMessage(warnMsg)
                # Collapse overlong payload-marker regions to keep memory bounded
                chunk = re.sub(getBytes(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start)), getBytes("%s%s%s" % (kb.chars.stop, LARGE_READ_TRIM_MARKER, kb.chars.start)), chunk)
                chunks.append(chunk)

                if sum(len(_) for _ in chunks) > MAX_CONNECTION_TOTAL_SIZE:
                    warnMsg = "too large response detected. Automatically trimming it"
                    singleTimeWarnMessage(warnMsg)
                    break

    if conf.yuge:
        # Artificially inflate the response for testing purposes ('--yuge')
        chunks = YUGE_FACTOR * chunks

    return b"".join(chunks)
272
273
@staticmethod
274
def getPage(**kwargs):
275
"""
276
This method connects to the target URL or proxy and returns
277
the target URL page content
278
"""
279
280
if conf.offline:
281
return None, None, None
282
283
url = kwargs.get("url", None) or conf.url
284
get = kwargs.get("get", None)
285
post = kwargs.get("post", None)
286
method = kwargs.get("method", None)
287
cookie = kwargs.get("cookie", None)
288
ua = kwargs.get("ua", None) or conf.agent
289
referer = kwargs.get("referer", None) or conf.referer
290
direct_ = kwargs.get("direct", False)
291
multipart = kwargs.get("multipart", None)
292
silent = kwargs.get("silent", False)
293
raise404 = kwargs.get("raise404", True)
294
timeout = kwargs.get("timeout", None) or conf.timeout
295
auxHeaders = kwargs.get("auxHeaders", None)
296
response = kwargs.get("response", False)
297
ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout or conf.ignoreTimeouts
298
refreshing = kwargs.get("refreshing", False)
299
retrying = kwargs.get("retrying", False)
300
crawling = kwargs.get("crawling", False)
301
checking = kwargs.get("checking", False)
302
skipRead = kwargs.get("skipRead", False)
303
finalCode = kwargs.get("finalCode", False)
304
chunked = kwargs.get("chunked", False) or conf.chunked
305
306
if isinstance(conf.delay, (int, float)) and conf.delay > 0:
307
time.sleep(conf.delay)
308
309
start = time.time()
310
311
threadData = getCurrentThreadData()
312
with kb.locks.request:
313
kb.requestCounter += 1
314
threadData.lastRequestUID = kb.requestCounter
315
316
if conf.proxyFreq:
317
if kb.requestCounter % conf.proxyFreq == 0:
318
conf.proxy = None
319
320
warnMsg = "changing proxy"
321
logger.warning(warnMsg)
322
323
setHTTPHandlers()
324
325
if conf.dummy or conf.murphyRate and randomInt() % conf.murphyRate == 0:
326
if conf.murphyRate:
327
time.sleep(randomInt() % (MAX_MURPHY_SLEEP_TIME + 1))
328
329
page, headers, code = randomStr(int(randomInt()), alphabet=[_unichr(_) for _ in xrange(256)]), None, None if not conf.murphyRate else randomInt(3)
330
331
threadData.lastPage = page
332
threadData.lastCode = code
333
334
return page, headers, code
335
336
if conf.liveCookies:
337
with kb.locks.liveCookies:
338
if not checkFile(conf.liveCookies, raiseOnError=False) or os.path.getsize(conf.liveCookies) == 0:
339
warnMsg = "[%s] [WARNING] live cookies file '%s' is empty or non-existent. Waiting for timeout (%d seconds)" % (time.strftime("%X"), conf.liveCookies, LIVE_COOKIES_TIMEOUT)
340
dataToStdout(warnMsg)
341
342
valid = False
343
for _ in xrange(LIVE_COOKIES_TIMEOUT):
344
if checkFile(conf.liveCookies, raiseOnError=False) and os.path.getsize(conf.liveCookies) > 0:
345
valid = True
346
break
347
else:
348
dataToStdout('.')
349
time.sleep(1)
350
351
dataToStdout("\n")
352
353
if not valid:
354
errMsg = "problem occurred while loading cookies from file '%s'" % conf.liveCookies
355
raise SqlmapValueException(errMsg)
356
357
cookie = openFile(conf.liveCookies).read().strip()
358
cookie = re.sub(r"(?i)\ACookie:\s*", "", cookie)
359
360
if multipart:
361
post = multipart
362
else:
363
if not post:
364
chunked = False
365
366
elif chunked:
367
post = _urllib.parse.unquote(post)
368
post = chunkSplitPostData(post)
369
370
webSocket = url.lower().startswith("ws")
371
372
if not _urllib.parse.urlsplit(url).netloc:
373
url = _urllib.parse.urljoin(conf.url, url)
374
375
# flag to know if we are dealing with the same target host
376
target = checkSameHost(url, conf.url)
377
378
if not retrying:
379
# Reset the number of connection retries
380
threadData.retriesCount = 0
381
382
# fix for known issue when urllib2 just skips the other part of provided
383
# url splitted with space char while urlencoding it in the later phase
384
url = url.replace(" ", "%20")
385
386
if "://" not in url:
387
url = "http://%s" % url
388
389
conn = None
390
page = None
391
code = None
392
status = None
393
394
_ = _urllib.parse.urlsplit(url)
395
requestMsg = u"HTTP request [#%d]:\r\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
396
requestMsg += getUnicode(("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling, checking)) else url)
397
responseMsg = u"HTTP response "
398
requestHeaders = u""
399
responseHeaders = None
400
logHeaders = u""
401
skipLogTraffic = False
402
403
raise404 = raise404 and not kb.ignoreNotFound
404
405
# support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
406
# support those by default
407
url = asciifyUrl(url)
408
409
try:
410
socket.setdefaulttimeout(timeout)
411
412
if direct_:
413
if '?' in url:
414
url, params = url.split('?', 1)
415
params = urlencode(params)
416
url = "%s?%s" % (url, params)
417
418
elif any((refreshing, crawling, checking)):
419
pass
420
421
elif target:
422
if conf.forceSSL:
423
url = re.sub(r"(?i)\A(http|ws):", r"\g<1>s:", url)
424
url = re.sub(r"(?i):80/", ":443/", url)
425
426
if PLACE.GET in conf.parameters and not get:
427
get = conf.parameters[PLACE.GET]
428
429
if not conf.skipUrlEncode:
430
get = urlencode(get, limit=True)
431
432
if get:
433
if '?' in url:
434
url = "%s%s%s" % (url, DEFAULT_GET_POST_DELIMITER, get)
435
requestMsg += "%s%s" % (DEFAULT_GET_POST_DELIMITER, get)
436
else:
437
url = "%s?%s" % (url, get)
438
requestMsg += "?%s" % get
439
440
if PLACE.POST in conf.parameters and not post and method != HTTPMETHOD.GET:
441
post = conf.parameters[PLACE.POST]
442
443
elif get:
444
url = "%s?%s" % (url, get)
445
requestMsg += "?%s" % get
446
447
requestMsg += " %s" % _http_client.HTTPConnection._http_vsn_str
448
449
# Prepare HTTP headers
450
headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: getHeader(dict(conf.httpHeaders), HTTP_HEADER.HOST) or getHostHeader(url)}, base=None if target else {})
451
452
if HTTP_HEADER.COOKIE in headers:
453
cookie = headers[HTTP_HEADER.COOKIE]
454
455
if kb.authHeader:
456
headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader
457
458
if kb.proxyAuthHeader:
459
headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader
460
461
if not conf.requestFile or not target:
462
if not getHeader(headers, HTTP_HEADER.ACCEPT):
463
headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
464
465
if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING):
466
headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
467
468
elif conf.requestFile and getHeader(headers, HTTP_HEADER.USER_AGENT) == DEFAULT_USER_AGENT:
469
for header in headers:
470
if header.upper() == HTTP_HEADER.USER_AGENT.upper():
471
del headers[header]
472
break
473
474
if post is not None and not multipart and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
475
headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE if unArrayizeValue(conf.base64Parameter) != HTTPMETHOD.POST else PLAIN_TEXT_CONTENT_TYPE)
476
477
if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
478
warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTP_HEADER.CONTENT_TYPE
479
warnMsg += "Will try to reconstruct"
480
singleTimeWarnMessage(warnMsg)
481
482
boundary = findMultipartPostBoundary(conf.data)
483
if boundary:
484
headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)
485
486
if conf.keepAlive:
487
headers[HTTP_HEADER.CONNECTION] = "keep-alive"
488
489
if chunked:
490
headers[HTTP_HEADER.TRANSFER_ENCODING] = "chunked"
491
492
if auxHeaders:
493
headers = forgeHeaders(auxHeaders, headers)
494
495
if kb.headersFile:
496
content = openFile(kb.headersFile, 'r').read()
497
for line in content.split("\n"):
498
line = getText(line.strip())
499
if ':' in line:
500
header, value = line.split(':', 1)
501
headers[header] = value
502
503
if conf.localhost:
504
headers[HTTP_HEADER.HOST] = "localhost"
505
506
for key, value in list(headers.items()):
507
if key.upper() == HTTP_HEADER.ACCEPT_ENCODING.upper():
508
value = re.sub(r"(?i)(,)br(,)?", lambda match: ',' if match.group(1) and match.group(2) else "", value) or "identity"
509
510
del headers[key]
511
if isinstance(value, six.string_types):
512
for char in (r"\r", r"\n"):
513
value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", value)
514
headers[getBytes(key) if six.PY2 else key] = getBytes(value.strip("\r\n")) # Note: Python3 has_header() expects non-bytes value
515
516
if six.PY2:
517
url = getBytes(url) # Note: Python3 requires text while Python2 has problems when mixing text with binary POST
518
519
if webSocket:
520
ws = websocket.WebSocket()
521
ws.settimeout(WEBSOCKET_INITIAL_TIMEOUT if kb.webSocketRecvCount is None else timeout)
522
ws.connect(url, header=("%s: %s" % _ for _ in headers.items() if _[0] not in ("Host",)), cookie=cookie) # WebSocket will add Host field of headers automatically
523
ws.send(urldecode(post or ""))
524
525
_page = []
526
527
if kb.webSocketRecvCount is None:
528
while True:
529
try:
530
_page.append(ws.recv())
531
except websocket.WebSocketTimeoutException:
532
kb.webSocketRecvCount = len(_page)
533
break
534
else:
535
for i in xrange(max(1, kb.webSocketRecvCount)):
536
_page.append(ws.recv())
537
538
page = "\n".join(_page)
539
540
ws.close()
541
code = ws.status
542
status = _http_client.responses[code]
543
544
class _(dict):
545
pass
546
547
responseHeaders = _(ws.getheaders())
548
responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]
549
550
requestHeaders += "\r\n".join(["%s: %s" % (u"-".join(_.capitalize() for _ in getUnicode(key).split(u'-')) if hasattr(key, "capitalize") else getUnicode(key), getUnicode(value)) for (key, value) in responseHeaders.items()])
551
requestMsg += "\r\n%s" % requestHeaders
552
553
if post is not None:
554
requestMsg += "\r\n\r\n%s" % getUnicode(post)
555
556
requestMsg += "\r\n"
557
558
threadData.lastRequestMsg = requestMsg
559
560
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
561
else:
562
post = getBytes(post)
563
564
if unArrayizeValue(conf.base64Parameter) == HTTPMETHOD.POST:
565
if kb.place != HTTPMETHOD.POST:
566
conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
567
else:
568
post = urldecode(post, convall=True)
569
post = encodeBase64(post)
570
571
if target and cmdLineOptions.method or method and method not in (HTTPMETHOD.GET, HTTPMETHOD.POST):
572
req = MethodRequest(url, post, headers)
573
req.set_method(cmdLineOptions.method or method)
574
elif url is not None:
575
req = _urllib.request.Request(url, post, headers)
576
else:
577
return None, None, None
578
579
for function in kb.preprocessFunctions:
580
try:
581
function(req)
582
except Exception as ex:
583
errMsg = "error occurred while running preprocess "
584
errMsg += "function '%s' ('%s')" % (function.__name__, getSafeExString(ex))
585
raise SqlmapGenericException(errMsg)
586
else:
587
post, headers = req.data, req.headers
588
589
requestHeaders += "\r\n".join(["%s: %s" % (u"-".join(_.capitalize() for _ in getUnicode(key).split(u'-')) if hasattr(key, "capitalize") else getUnicode(key), getUnicode(value)) for (key, value) in req.header_items()])
590
591
if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
592
conf.cj._policy._now = conf.cj._now = int(time.time())
593
with conf.cj._cookies_lock:
594
cookies = conf.cj._cookies_for_request(req)
595
requestHeaders += "\r\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))
596
597
if post is not None:
598
if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH) and not chunked:
599
requestHeaders += "\r\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))
600
601
if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
602
requestHeaders += "\r\n%s: %s" % (HTTP_HEADER.CONNECTION, "close" if not conf.keepAlive else "keep-alive")
603
604
requestMsg += "\r\n%s" % requestHeaders
605
606
if post is not None:
607
requestMsg += "\r\n\r\n%s" % getUnicode(post)
608
609
if not chunked:
610
requestMsg += "\r\n"
611
612
if conf.cj:
613
for cookie in conf.cj:
614
if cookie.value is None:
615
cookie.value = ""
616
else:
617
for char in (r"\r", r"\n"):
618
cookie.value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", cookie.value)
619
620
if conf.http2:
621
try:
622
import httpx
623
except ImportError:
624
raise SqlmapMissingDependence("httpx[http2] not available (e.g. 'pip%s install httpx[http2]')" % ('3' if six.PY3 else ""))
625
626
if LooseVersion(httpx.__version__) < LooseVersion(MIN_HTTPX_VERSION):
627
raise SqlmapMissingDependence("outdated version of httpx detected (%s<%s)" % (httpx.__version__, MIN_HTTPX_VERSION))
628
629
try:
630
proxy_mounts = dict(("%s://" % key, httpx.HTTPTransport(proxy="%s%s" % ("http://" if "://" not in kb.proxies[key] else "", kb.proxies[key]))) for key in kb.proxies) if kb.proxies else None
631
with httpx.Client(verify=False, http2=True, timeout=timeout, follow_redirects=True, cookies=conf.cj, mounts=proxy_mounts) as client:
632
conn = client.request(method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET), url, headers=headers, data=post)
633
except (httpx.HTTPError, httpx.InvalidURL, httpx.CookieConflict, httpx.StreamError) as ex:
634
raise _http_client.HTTPException(getSafeExString(ex))
635
else:
636
conn.code = conn.status_code
637
conn.msg = conn.reason_phrase
638
conn.info = lambda c=conn: c.headers
639
640
conn._read_buffer = conn.read()
641
conn._read_offset = 0
642
643
requestMsg = re.sub(" HTTP/[0-9.]+\r\n", " %s\r\n" % conn.http_version, requestMsg, count=1)
644
645
if not multipart:
646
threadData.lastRequestMsg = requestMsg
647
648
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
649
650
def _read(count=None):
651
offset = conn._read_offset
652
if count is None:
653
result = conn._read_buffer[offset:]
654
conn._read_offset = len(conn._read_buffer)
655
else:
656
result = conn._read_buffer[offset: offset + count]
657
conn._read_offset += len(result)
658
return result
659
660
conn.read = _read
661
else:
662
if not multipart:
663
threadData.lastRequestMsg = requestMsg
664
665
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
666
667
conn = _urllib.request.urlopen(req)
668
669
if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and (conf.authType or "").lower() == AUTH_TYPE.BASIC.lower():
670
kb.authHeader = getUnicode(getRequestHeader(req, HTTP_HEADER.AUTHORIZATION))
671
672
if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
673
kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)
674
675
# Return response object
676
if response:
677
return conn, None, None
678
679
# Get HTTP response
680
if hasattr(conn, "redurl"):
681
page = (threadData.lastRedirectMsg[1] if kb.choices.redirect == REDIRECTION.NO else Connect._connReadProxy(conn)) if not skipRead else None
682
skipLogTraffic = kb.choices.redirect == REDIRECTION.NO
683
code = conn.redcode if not finalCode else code
684
else:
685
page = Connect._connReadProxy(conn) if not skipRead else None
686
687
if conn:
688
code = (code or conn.code) if conn.code == kb.originalCode else conn.code # do not override redirection code (for comparison purposes)
689
responseHeaders = conn.info()
690
responseHeaders[URI_HTTP_HEADER] = conn.geturl() if hasattr(conn, "geturl") else url
691
692
if getattr(conn, "redurl", None) is not None:
693
responseHeaders[HTTP_HEADER.LOCATION] = conn.redurl
694
695
responseHeaders = patchHeaders(responseHeaders)
696
kb.serverHeader = responseHeaders.get(HTTP_HEADER.SERVER, kb.serverHeader)
697
else:
698
code = None
699
responseHeaders = {}
700
701
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE), percentDecode=not crawling)
702
status = getUnicode(conn.msg) if conn and getattr(conn, "msg", None) else None
703
704
kb.connErrorCounter = 0
705
706
if not refreshing:
707
refresh = responseHeaders.get(HTTP_HEADER.REFRESH, "").split("url=")[-1].strip()
708
709
if extractRegexResult(META_REFRESH_REGEX, page):
710
refresh = extractRegexResult(META_REFRESH_REGEX, page)
711
712
debugMsg = "got HTML meta refresh header"
713
logger.debug(debugMsg)
714
715
if not refresh:
716
refresh = extractRegexResult(JAVASCRIPT_HREF_REGEX, page)
717
718
if refresh:
719
debugMsg = "got Javascript redirect logic"
720
logger.debug(debugMsg)
721
722
if refresh:
723
if kb.alwaysRefresh is None:
724
msg = "got a refresh intent "
725
msg += "(redirect like response common to login pages) to '%s'. " % refresh
726
msg += "Do you want to apply it from now on? [Y/n]"
727
728
kb.alwaysRefresh = readInput(msg, default='Y', boolean=True)
729
730
if kb.alwaysRefresh:
731
if re.search(r"\Ahttps?://", refresh, re.I):
732
url = refresh
733
else:
734
url = _urllib.parse.urljoin(url, refresh)
735
736
threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
737
kwargs["refreshing"] = True
738
kwargs["url"] = url
739
kwargs["get"] = None
740
kwargs["post"] = None
741
742
try:
743
return Connect._getPageProxy(**kwargs)
744
except SqlmapSyntaxException:
745
pass
746
747
# Explicit closing of connection object
748
if conn and not conf.keepAlive:
749
try:
750
if hasattr(conn, "fp") and hasattr(conn.fp, '_sock'):
751
conn.fp._sock.close()
752
conn.close()
753
except Exception as ex:
754
warnMsg = "problem occurred during connection closing ('%s')" % getSafeExString(ex)
755
logger.warning(warnMsg)
756
757
except SqlmapConnectionException as ex:
758
if conf.proxyList and not kb.threadException:
759
warnMsg = "unable to connect to the target URL ('%s')" % getSafeExString(ex)
760
logger.critical(warnMsg)
761
threadData.retriesCount = conf.retries
762
return Connect._retryProxy(**kwargs)
763
else:
764
raise
765
766
except _urllib.error.HTTPError as ex:
767
page = None
768
responseHeaders = None
769
770
if checking:
771
return None, None, None
772
773
try:
774
page = ex.read() if not skipRead else None
775
responseHeaders = ex.info()
776
responseHeaders[URI_HTTP_HEADER] = ex.geturl()
777
responseHeaders = patchHeaders(responseHeaders)
778
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE), percentDecode=not crawling)
779
except socket.timeout:
780
warnMsg = "connection timed out while trying "
781
warnMsg += "to get error page information (%d)" % ex.code
782
logger.warning(warnMsg)
783
return None, None, None
784
except KeyboardInterrupt:
785
raise
786
except:
787
pass
788
finally:
789
page = getUnicode(page)
790
791
code = ex.code
792
status = getUnicode(getattr(ex, "reason", None) or getSafeExString(ex).split(": ", 1)[-1])
793
794
kb.originalCode = kb.originalCode or code
795
threadData.lastHTTPError = (threadData.lastRequestUID, code, status)
796
kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1
797
798
responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, code, status)
799
800
if responseHeaders and getattr(responseHeaders, "headers", None):
801
logHeaders = "".join(getUnicode(responseHeaders.headers)).strip()
802
803
logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE]), start, time.time())
804
805
skipLogTraffic = True
806
807
if conf.verbose <= 5:
808
responseMsg += getUnicode(logHeaders)
809
elif conf.verbose > 5:
810
responseMsg += "%s\r\n\r\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE])
811
812
if not multipart:
813
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
814
815
if code in conf.abortCode:
816
errMsg = "aborting due to detected HTTP code '%d'" % code
817
singleTimeLogMessage(errMsg, logging.CRITICAL)
818
raise SystemExit
819
820
if ex.code not in (conf.ignoreCode or []):
821
if ex.code == _http_client.UNAUTHORIZED:
822
errMsg = "not authorized, try to provide right HTTP "
823
errMsg += "authentication type and valid credentials (%d). " % code
824
errMsg += "If this is intended, try to rerun by providing "
825
errMsg += "a valid value for option '--ignore-code'"
826
raise SqlmapConnectionException(errMsg)
827
elif chunked and ex.code in (_http_client.METHOD_NOT_ALLOWED, _http_client.LENGTH_REQUIRED):
828
warnMsg = "turning off HTTP chunked transfer encoding "
829
warnMsg += "as it seems that the target site doesn't support it (%d)" % code
830
singleTimeWarnMessage(warnMsg)
831
conf.chunked = kwargs["chunked"] = False
832
return Connect.getPage(**kwargs)
833
elif ex.code == _http_client.REQUEST_URI_TOO_LONG:
834
warnMsg = "request URI is marked as too long by the target. "
835
warnMsg += "you are advised to try a switch '--no-cast' and/or '--no-escape'"
836
singleTimeWarnMessage(warnMsg)
837
elif ex.code == _http_client.NOT_FOUND:
838
if raise404:
839
errMsg = "page not found (%d)" % code
840
raise SqlmapConnectionException(errMsg)
841
else:
842
debugMsg = "page not found (%d)" % code
843
singleTimeLogMessage(debugMsg, logging.DEBUG)
844
elif ex.code == _http_client.GATEWAY_TIMEOUT:
845
if ignoreTimeout:
846
return None if not conf.ignoreTimeouts else "", None, None
847
else:
848
warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, _http_client.responses[ex.code])
849
if threadData.retriesCount < conf.retries and not kb.threadException:
850
warnMsg += ". sqlmap is going to retry the request"
851
logger.critical(warnMsg)
852
return Connect._retryProxy(**kwargs)
853
elif kb.testMode:
854
logger.critical(warnMsg)
855
return None, None, None
856
else:
857
raise SqlmapConnectionException(warnMsg)
858
else:
859
debugMsg = "got HTTP error code: %d ('%s')" % (code, status)
860
logger.debug(debugMsg)
861
862
except (_urllib.error.URLError, socket.error, socket.timeout, _http_client.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError, ValueError, OverflowError, AttributeError, OSError, AssertionError, KeyError):
863
tbMsg = traceback.format_exc()
864
865
if conf.debug:
866
dataToStdout(tbMsg)
867
868
if checking:
869
return None, None, None
870
elif "KeyError:" in tbMsg:
871
if "content-length" in tbMsg:
872
return None, None, None
873
else:
874
raise
875
elif "AttributeError:" in tbMsg:
876
if "WSAECONNREFUSED" in tbMsg:
877
return None, None, None
878
else:
879
raise
880
elif "no host given" in tbMsg:
881
warnMsg = "invalid URL address used (%s)" % repr(url)
882
raise SqlmapSyntaxException(warnMsg)
883
elif any(_ in tbMsg for _ in ("forcibly closed", "Connection is already closed", "ConnectionAbortedError")):
884
warnMsg = "connection was forcibly closed by the target URL"
885
elif "timed out" in tbMsg:
886
if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
887
singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS) is dropping 'suspicious' requests")
888
kb.droppingRequests = True
889
warnMsg = "connection timed out to the target URL"
890
elif "Connection reset" in tbMsg:
891
if not conf.disablePrecon:
892
singleTimeWarnMessage("turning off pre-connect mechanism because of connection reset(s)")
893
conf.disablePrecon = True
894
895
if kb.testMode:
896
singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS) is resetting 'suspicious' requests")
897
kb.droppingRequests = True
898
warnMsg = "connection reset to the target URL"
899
elif "URLError" in tbMsg or "error" in tbMsg:
900
warnMsg = "unable to connect to the target URL"
901
match = re.search(r"Errno \d+\] ([^>\n]+)", tbMsg)
902
if match:
903
warnMsg += " ('%s')" % match.group(1).strip()
904
elif "NTLM" in tbMsg:
905
warnMsg = "there has been a problem with NTLM authentication"
906
elif "Invalid header name" in tbMsg: # (e.g. PostgreSQL ::Text payload)
907
return None, None, None
908
elif "BadStatusLine" in tbMsg:
909
warnMsg = "connection dropped or unknown HTTP "
910
warnMsg += "status code received"
911
if not conf.agent and not conf.randomAgent:
912
warnMsg += ". Try to force the HTTP User-Agent "
913
warnMsg += "header with option '--user-agent' or switch '--random-agent'"
914
elif "IncompleteRead" in tbMsg:
915
warnMsg = "there was an incomplete read error while retrieving data "
916
warnMsg += "from the target URL"
917
elif "Handshake status" in tbMsg:
918
status = re.search(r"Handshake status ([\d]{3})", tbMsg)
919
errMsg = "websocket handshake status %s" % status.group(1) if status else "unknown"
920
raise SqlmapConnectionException(errMsg)
921
elif "SqlmapCompressionException" in tbMsg:
922
warnMsg = "problems with response (de)compression"
923
retrying = True
924
else:
925
warnMsg = "unable to connect to the target URL"
926
927
if "BadStatusLine" not in tbMsg and any((conf.proxy, conf.tor)):
928
warnMsg += " or proxy"
929
930
if silent:
931
return None, None, None
932
933
with kb.locks.connError:
934
kb.connErrorCounter += 1
935
936
if kb.connErrorCounter >= MAX_CONSECUTIVE_CONNECTION_ERRORS and kb.choices.connError is None:
937
message = "there seems to be a continuous problem with connection to the target. "
938
message += "Are you sure that you want to continue? [y/N] "
939
940
kb.choices.connError = readInput(message, default='N', boolean=True)
941
942
if kb.choices.connError is False:
943
raise SqlmapSkipTargetException
944
945
if "forcibly closed" in tbMsg:
946
logger.critical(warnMsg)
947
return None, None, None
948
elif ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead", "Interrupted system call")):
949
return None if not conf.ignoreTimeouts else "", None, None
950
elif threadData.retriesCount < conf.retries and not kb.threadException:
951
warnMsg += ". sqlmap is going to retry the request"
952
if not retrying:
953
warnMsg += "(s)"
954
logger.critical(warnMsg)
955
else:
956
logger.debug(warnMsg)
957
return Connect._retryProxy(**kwargs)
958
elif kb.testMode or kb.multiThreadMode:
959
logger.critical(warnMsg)
960
return None, None, None
961
else:
962
raise SqlmapConnectionException(warnMsg)
963
964
finally:
965
for function in kb.postprocessFunctions:
966
try:
967
page, responseHeaders, code = function(page, responseHeaders, code)
968
except Exception as ex:
969
errMsg = "error occurred while running postprocess "
970
errMsg += "function '%s' ('%s')" % (function.__name__, getSafeExString(ex))
971
raise SqlmapGenericException(errMsg)
972
973
if isinstance(page, six.binary_type):
974
if HTTP_HEADER.CONTENT_TYPE in (responseHeaders or {}) and not re.search(TEXT_CONTENT_TYPE_REGEX, responseHeaders[HTTP_HEADER.CONTENT_TYPE]):
975
page = six.text_type(page, errors="ignore")
976
else:
977
page = getUnicode(page)
978
979
for _ in (getattr(conn, "redcode", None), code):
980
if _ is not None and _ in conf.abortCode:
981
errMsg = "aborting due to detected HTTP code '%d'" % _
982
singleTimeLogMessage(errMsg, logging.CRITICAL)
983
raise SystemExit
984
985
threadData.lastPage = page
986
threadData.lastCode = code
987
988
socket.setdefaulttimeout(conf.timeout)
989
990
# Dirty patch for Python3.11.0a7 (e.g. https://github.com/sqlmapproject/sqlmap/issues/5091)
991
if not sys.version.startswith("3.11."):
992
if conf.retryOn and re.search(conf.retryOn, page, re.I):
993
if threadData.retriesCount < conf.retries:
994
warnMsg = "forced retry of the request because of undesired page content"
995
logger.warning(warnMsg)
996
return Connect._retryProxy(**kwargs)
997
998
processResponse(page, responseHeaders, code, status)
999
1000
if not skipLogTraffic:
1001
if conn and getattr(conn, "redurl", None):
1002
_ = _urllib.parse.urlsplit(conn.redurl)
1003
_ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else ""))
1004
requestMsg = re.sub(r"(\n[A-Z]+ ).+?( HTTP/\d)", r"\g<1>%s\g<2>" % getUnicode(_).replace("\\", "\\\\"), requestMsg, 1)
1005
1006
if kb.resendPostOnRedirect is False:
1007
requestMsg = re.sub(r"(\[#\d+\]:\n)POST ", r"\g<1>GET ", requestMsg)
1008
requestMsg = re.sub(r"(?i)Content-length: \d+\n", "", requestMsg)
1009
requestMsg = re.sub(r"(?s)\n\n.+", "\n", requestMsg)
1010
1011
responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, conn.code, status)
1012
elif "\n" not in responseMsg:
1013
responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, code, status)
1014
1015
if responseHeaders:
1016
logHeaders = "".join(getUnicode(responseHeaders.headers)).strip()
1017
1018
logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE]), start, time.time())
1019
1020
if conf.verbose <= 5:
1021
responseMsg += getUnicode(logHeaders)
1022
elif conf.verbose > 5:
1023
responseMsg += "%s\r\n\r\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE])
1024
1025
if not multipart:
1026
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
1027
1028
return page, responseHeaders, code
1029
1030
@staticmethod
1031
@stackedmethod
1032
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True, disableTampering=False, ignoreSecondOrder=False):
1033
"""
1034
This method calls a function to get the target URL page content
1035
and returns its page ratio (0 <= ratio <= 1) or a boolean value
1036
representing False/True match in case of !getRatioValue
1037
"""
1038
1039
if conf.direct:
1040
return direct(value, content)
1041
1042
get = None
1043
post = None
1044
cookie = None
1045
ua = None
1046
referer = None
1047
host = None
1048
page = None
1049
pageLength = None
1050
uri = None
1051
code = None
1052
1053
if not place:
1054
place = kb.injection.place or PLACE.GET
1055
1056
kb.place = place
1057
1058
if not auxHeaders:
1059
auxHeaders = {}
1060
1061
raise404 = place != PLACE.URI if raise404 is None else raise404
1062
method = method or conf.method
1063
1064
postUrlEncode = kb.postUrlEncode
1065
1066
value = agent.adjustLateValues(value)
1067
payload = agent.extractPayload(value)
1068
threadData = getCurrentThreadData()
1069
1070
if conf.httpHeaders:
1071
headers = OrderedDict(conf.httpHeaders)
1072
contentType = max(headers[_] or "" if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else "" for _ in headers) or None
1073
1074
if (kb.postHint or conf.skipUrlEncode) and postUrlEncode:
1075
postUrlEncode = False
1076
if not (conf.skipUrlEncode and contentType): # NOTE: https://github.com/sqlmapproject/sqlmap/issues/5092
1077
conf.httpHeaders = [_ for _ in conf.httpHeaders if _[1] != contentType]
1078
contentType = POST_HINT_CONTENT_TYPES.get(kb.postHint, PLAIN_TEXT_CONTENT_TYPE)
1079
conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType))
1080
if "urlencoded" in contentType:
1081
postUrlEncode = True
1082
1083
if payload:
1084
delimiter = conf.paramDel or (DEFAULT_GET_POST_DELIMITER if place != PLACE.COOKIE else DEFAULT_COOKIE_DELIMITER)
1085
1086
if not disableTampering and kb.tamperFunctions:
1087
for function in kb.tamperFunctions:
1088
hints = {}
1089
1090
try:
1091
payload = function(payload=payload, headers=auxHeaders, delimiter=delimiter, hints=hints)
1092
except Exception as ex:
1093
errMsg = "error occurred while running tamper "
1094
errMsg += "function '%s' ('%s')" % (function.__name__, getSafeExString(ex))
1095
raise SqlmapGenericException(errMsg)
1096
1097
if not isinstance(payload, six.string_types):
1098
errMsg = "tamper function '%s' returns " % function.__name__
1099
errMsg += "invalid payload type ('%s')" % type(payload)
1100
raise SqlmapValueException(errMsg)
1101
1102
value = agent.replacePayload(value, payload)
1103
1104
if hints:
1105
if HINT.APPEND in hints:
1106
value = "%s%s%s" % (value, delimiter, hints[HINT.APPEND])
1107
1108
if HINT.PREPEND in hints:
1109
if place == PLACE.URI:
1110
match = re.search(r"\w+\s*=\s*%s" % PAYLOAD_DELIMITER, value) or re.search(r"[^?%s/]=\s*%s" % (re.escape(delimiter), PAYLOAD_DELIMITER), value)
1111
if match:
1112
value = value.replace(match.group(0), "%s%s%s" % (hints[HINT.PREPEND], delimiter, match.group(0)))
1113
else:
1114
value = "%s%s%s" % (hints[HINT.PREPEND], delimiter, value)
1115
1116
logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload.replace('\\', BOUNDARY_BACKSLASH_MARKER)).replace(BOUNDARY_BACKSLASH_MARKER, '\\'))
1117
1118
if place == PLACE.CUSTOM_POST and kb.postHint:
1119
if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML) and not conf.skipXmlEncode:
1120
# payloads in SOAP/XML should have chars > and < replaced
1121
# with their HTML encoded counterparts
1122
payload = payload.replace("&#", SAFE_HEX_MARKER)
1123
payload = payload.replace('&', "&amp;").replace('>', "&gt;").replace('<', "&lt;").replace('"', "&quot;").replace("'", "&apos;") # Reference: https://stackoverflow.com/a/1091953
1124
payload = payload.replace(SAFE_HEX_MARKER, "&#")
1125
elif kb.postHint == POST_HINT.JSON:
1126
payload = escapeJsonValue(payload)
1127
elif kb.postHint == POST_HINT.JSON_LIKE:
1128
payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
1129
payload = escapeJsonValue(payload)
1130
payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
1131
value = agent.replacePayload(value, payload)
1132
else:
1133
# GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
1134
if (place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) or place == PLACE.CUSTOM_HEADER and value.split(',')[0].upper() == HTTP_HEADER.COOKIE.upper()) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and postUrlEncode:
1135
skip = False
1136
1137
if place == PLACE.COOKIE or place == PLACE.CUSTOM_HEADER and value.split(',')[0].upper() == HTTP_HEADER.COOKIE.upper():
1138
if kb.choices.cookieEncode is None:
1139
msg = "do you want to URL encode cookie values (implementation specific)? %s" % ("[Y/n]" if not conf.url.endswith(".aspx") else "[y/N]") # Reference: https://support.microsoft.com/en-us/kb/313282
1140
kb.choices.cookieEncode = readInput(msg, default='Y' if not conf.url.endswith(".aspx") else 'N', boolean=True)
1141
if not kb.choices.cookieEncode:
1142
skip = True
1143
1144
if not skip:
1145
if place in (PLACE.POST, PLACE.CUSTOM_POST): # potential problems in other cases (e.g. URL encoding of whole URI - including path)
1146
value = urlencode(value, spaceplus=kb.postSpaceToPlus)
1147
payload = urlencode(payload, safe='%', spaceplus=kb.postSpaceToPlus)
1148
value = agent.replacePayload(value, payload)
1149
postUrlEncode = False
1150
1151
if conf.hpp:
1152
if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_PLATFORM.ASP, WEB_PLATFORM.ASPX)):
1153
warnMsg = "HTTP parameter pollution should work only against "
1154
warnMsg += "ASP(.NET) targets"
1155
singleTimeWarnMessage(warnMsg)
1156
if place in (PLACE.GET, PLACE.POST):
1157
_ = re.escape(PAYLOAD_DELIMITER)
1158
match = re.search(r"(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value)
1159
if match:
1160
payload = match.group("value")
1161
1162
for splitter in (urlencode(' '), ' '):
1163
if splitter in payload:
1164
prefix, suffix = ("*/", "/*") if splitter == ' ' else (urlencode(_) for _ in ("*/", "/*"))
1165
parts = payload.split(splitter)
1166
parts[0] = "%s%s" % (parts[0], suffix)
1167
parts[-1] = "%s%s=%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[-1])
1168
for i in xrange(1, len(parts) - 1):
1169
parts[i] = "%s%s=%s%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[i], suffix)
1170
payload = "".join(parts)
1171
1172
for splitter in (urlencode(','), ','):
1173
payload = payload.replace(splitter, "%s%s=" % (DEFAULT_GET_POST_DELIMITER, match.group("name")))
1174
1175
value = agent.replacePayload(value, payload)
1176
else:
1177
warnMsg = "HTTP parameter pollution works only with regular "
1178
warnMsg += "GET and POST parameters"
1179
singleTimeWarnMessage(warnMsg)
1180
1181
if place:
1182
value = agent.removePayloadDelimiters(value)
1183
1184
if PLACE.GET in conf.parameters:
1185
get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value
1186
elif place == PLACE.GET: # Note: for (e.g.) checkWaf() when there are no GET parameters
1187
get = value
1188
1189
if PLACE.POST in conf.parameters:
1190
post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value
1191
elif place == PLACE.POST:
1192
post = value
1193
1194
if PLACE.CUSTOM_POST in conf.parameters:
1195
post = conf.parameters[PLACE.CUSTOM_POST].replace(kb.customInjectionMark, "") if place != PLACE.CUSTOM_POST or not value else value
1196
post = post.replace(ASTERISK_MARKER, '*') if post else post
1197
1198
if PLACE.COOKIE in conf.parameters:
1199
cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value
1200
1201
if PLACE.USER_AGENT in conf.parameters:
1202
ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value
1203
1204
if PLACE.REFERER in conf.parameters:
1205
referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value
1206
1207
if PLACE.HOST in conf.parameters:
1208
host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value
1209
1210
if PLACE.URI in conf.parameters:
1211
uri = conf.url if place != PLACE.URI or not value else value
1212
else:
1213
uri = conf.url
1214
1215
if value and place == PLACE.CUSTOM_HEADER:
1216
if value.split(',')[0].capitalize() == PLACE.COOKIE:
1217
cookie = value.split(',', 1)[-1]
1218
else:
1219
auxHeaders[value.split(',')[0]] = value.split(',', 1)[-1]
1220
1221
if conf.csrfToken:
1222
token = AttribDict()
1223
1224
def _adjustParameter(paramString, parameter, newValue):
    """Replace the value of *parameter* inside *paramString* with *newValue*.

    Handles both regular 'key=value' parameter strings and JSON-like
    ('"key": "value"') bodies. Returns *paramString* untouched when the
    parameter cannot be located.
    """
    result = paramString

    # The token name may appear in its URL-encoded form inside the string
    encoded = urlencode(parameter)
    if encoded in paramString:
        parameter = encoded

    match = re.search(r"%s=[^&]*" % re.escape(parameter), paramString, re.I)
    if match is not None:
        # Regular key=value occurrence (backslashes in the replacement are doubled
        # so re.sub treats them literally)
        result = re.sub(r"(?i)%s" % re.escape(match.group(0)), ("%s=%s" % (parameter, newValue)).replace('\\', r'\\'), paramString)
    else:
        # JSON-like notation (e.g. "token": "value")
        match = re.search(r"(%s[\"']\s*:\s*[\"'])([^\"']*)" % re.escape(parameter), paramString, re.I)
        if match is not None:
            result = re.sub(r"(?i)%s" % re.escape(match.group(0)), "%s%s" % (match.group(1), newValue), paramString)

    return result
1239
1240
for attempt in xrange(conf.csrfRetries + 1):
1241
if token:
1242
break
1243
1244
if attempt > 0:
1245
warnMsg = "unable to find anti-CSRF token '%s' at '%s'" % (conf.csrfToken._original, conf.csrfUrl or conf.url)
1246
warnMsg += ". sqlmap is going to retry the request"
1247
logger.warning(warnMsg)
1248
1249
page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, post=conf.csrfData or (conf.data if conf.csrfUrl == conf.url and (conf.csrfMethod or "").upper() == HTTPMETHOD.POST else None), method=conf.csrfMethod or (conf.method if conf.csrfUrl == conf.url else None), cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
1250
page = urldecode(page) # for anti-CSRF tokens with special characters in their name (e.g. 'foo:bar=...')
1251
1252
match = re.search(r"(?i)<input[^>]+\bname=[\"']?(?P<name>%s)\b[^>]*\bvalue=[\"']?(?P<value>[^>'\"]*)" % conf.csrfToken, page or "", re.I)
1253
1254
if not match:
1255
match = re.search(r"(?i)<input[^>]+\bvalue=[\"']?(?P<value>[^>'\"]*)[\"']?[^>]*\bname=[\"']?(?P<name>%s)\b" % conf.csrfToken, page or "", re.I)
1256
1257
if not match:
1258
match = re.search(r"(?P<name>%s)[\"']:[\"'](?P<value>[^\"']+)" % conf.csrfToken, page or "", re.I)
1259
1260
if not match:
1261
match = re.search(r"\b(?P<name>%s)\s*[:=]\s*(?P<value>\w+)" % conf.csrfToken, getUnicode(headers), re.I)
1262
1263
if not match:
1264
match = re.search(r"\b(?P<name>%s)\s*=\s*['\"]?(?P<value>[^;'\"]+)" % conf.csrfToken, page or "", re.I)
1265
1266
if not match:
1267
match = re.search(r"<meta\s+name=[\"']?(?P<name>%s)[\"']?[^>]+\b(value|content)=[\"']?(?P<value>[^>\"']+)" % conf.csrfToken, page or "", re.I)
1268
1269
if match:
1270
token.name, token.value = match.group("name"), match.group("value")
1271
1272
match = re.search(r"String\.fromCharCode\(([\d+, ]+)\)", token.value)
1273
if match:
1274
token.value = "".join(_unichr(int(_)) for _ in match.group(1).replace(' ', "").split(','))
1275
1276
if not token:
1277
if conf.csrfUrl and conf.csrfToken and conf.csrfUrl != conf.url and code == _http_client.OK:
1278
if headers and PLAIN_TEXT_CONTENT_TYPE in headers.get(HTTP_HEADER.CONTENT_TYPE, ""):
1279
token.name = conf.csrfToken
1280
token.value = page
1281
1282
if not token and conf.cj and any(re.search(conf.csrfToken, _.name, re.I) for _ in conf.cj):
1283
for _ in conf.cj:
1284
if re.search(conf.csrfToken, _.name, re.I):
1285
token.name, token.value = _.name, _.value
1286
if not any(re.search(conf.csrfToken, ' '.join(_), re.I) for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
1287
if post:
1288
post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, token.name, token.value)
1289
elif get:
1290
get = "%s%s%s=%s" % (get, conf.paramDel or DEFAULT_GET_POST_DELIMITER, token.name, token.value)
1291
else:
1292
get = "%s=%s" % (token.name, token.value)
1293
break
1294
1295
if not token:
1296
errMsg = "anti-CSRF token '%s' can't be found at '%s'" % (conf.csrfToken._original, conf.csrfUrl or conf.url)
1297
if not conf.csrfUrl:
1298
errMsg += ". You can try to rerun by providing "
1299
errMsg += "a valid value for option '--csrf-url'"
1300
raise SqlmapTokenException(errMsg)
1301
1302
if token:
1303
token.value = token.value.strip("'\"")
1304
1305
for candidate in (PLACE.GET, PLACE.POST, PLACE.CUSTOM_POST, PLACE.URI):
1306
if candidate in conf.parameters:
1307
if candidate == PLACE.URI and uri:
1308
uri = _adjustParameter(uri, token.name, token.value)
1309
elif candidate == PLACE.GET and get:
1310
get = _adjustParameter(get, token.name, token.value)
1311
elif candidate in (PLACE.POST, PLACE.CUSTOM_POST) and post:
1312
post = _adjustParameter(post, token.name, token.value)
1313
1314
for i in xrange(len(conf.httpHeaders)):
1315
if conf.httpHeaders[i][0].lower() == token.name.lower():
1316
conf.httpHeaders[i] = (conf.httpHeaders[i][0], token.value)
1317
1318
if conf.rParam:
1319
def _randomizeParameter(paramString, randomParameter):
    """Return *paramString* with the value of *randomParameter* randomized."""

    def _pick(current):
        # User-supplied random pool takes precedence over value mutation
        if randomParameter in kb.randomPool:
            return random.sample(kb.randomPool[randomParameter], 1)[0]
        return randomizeParameterValue(current)

    escaped = re.escape(randomParameter)

    # Regular key=value occurrence
    match = re.search(r"(\A|\b)%s=(?P<value>[^&;]*)" % escaped, paramString)
    if match:
        return re.sub(r"(\A|\b)%s=[^&;]*" % escaped, "%s=%s" % (randomParameter, _pick(match.group("value"))), paramString)

    # Non-standard notation (e.g. JSON-like '"name": value')
    match = re.search(r"(\A|\b)(%s\b[^\w]+)(?P<value>\w+)" % escaped, paramString)
    if match:
        return paramString.replace(match.group(0), "%s%s" % (match.group(2), _pick(match.group("value"))))

    return paramString
1333
1334
for randomParameter in conf.rParam:
1335
for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE, PLACE.URI, PLACE.CUSTOM_POST):
1336
if item in conf.parameters:
1337
if item == PLACE.GET and get:
1338
get = _randomizeParameter(get, randomParameter)
1339
elif item in (PLACE.POST, PLACE.CUSTOM_POST) and post:
1340
post = _randomizeParameter(post, randomParameter)
1341
elif item == PLACE.COOKIE and cookie:
1342
cookie = _randomizeParameter(cookie, randomParameter)
1343
elif item == PLACE.URI and uri:
1344
uri = _randomizeParameter(uri, randomParameter)
1345
1346
if conf.evalCode:
1347
delimiter = conf.paramDel or DEFAULT_GET_POST_DELIMITER
1348
variables = {"uri": uri, "lastPage": threadData.lastPage, "_locals": locals(), "cookie": cookie}
1349
originals = {}
1350
1351
if not get and PLACE.URI in conf.parameters:
1352
query = _urllib.parse.urlsplit(uri).query or ""
1353
else:
1354
query = None
1355
1356
for item in filterNone((get, post if not kb.postHint else None, query)):
1357
for part in item.split(delimiter):
1358
if '=' in part:
1359
name, value = part.split('=', 1)
1360
name = name.strip()
1361
if safeVariableNaming(name) != name:
1362
conf.evalCode = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), conf.evalCode)
1363
name = safeVariableNaming(name)
1364
value = urldecode(value, convall=True, spaceplus=(item == post and kb.postSpaceToPlus))
1365
variables[name] = value
1366
1367
if post and kb.postHint in (POST_HINT.JSON, POST_HINT.JSON_LIKE):
1368
for name, value in (parseJson(post) or {}).items():
1369
if safeVariableNaming(name) != name:
1370
conf.evalCode = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), conf.evalCode)
1371
name = safeVariableNaming(name)
1372
variables[name] = value
1373
1374
if cookie:
1375
for part in cookie.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER):
1376
if '=' in part:
1377
name, value = part.split('=', 1)
1378
name = name.strip()
1379
if safeVariableNaming(name) != name:
1380
conf.evalCode = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), conf.evalCode)
1381
name = safeVariableNaming(name)
1382
value = urldecode(value, convall=True)
1383
variables[name] = value
1384
1385
while True:
1386
try:
1387
compile(getBytes(re.sub(r"\s*;\s*", "\n", conf.evalCode)), "", "exec")
1388
except SyntaxError as ex:
1389
if ex.text:
1390
original = replacement = getUnicode(ex.text.strip())
1391
1392
if '=' in original:
1393
name, value = original.split('=', 1)
1394
name = name.strip()
1395
if safeVariableNaming(name) != name:
1396
replacement = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), replacement)
1397
else:
1398
for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
1399
if safeVariableNaming(_) != _:
1400
replacement = replacement.replace(_, safeVariableNaming(_))
1401
break
1402
1403
if original == replacement:
1404
conf.evalCode = conf.evalCode.replace(EVALCODE_ENCODED_PREFIX, "")
1405
break
1406
else:
1407
conf.evalCode = conf.evalCode.replace(getUnicode(ex.text.strip(), UNICODE_ENCODING), replacement)
1408
else:
1409
break
1410
else:
1411
break
1412
1413
originals.update(variables)
1414
evaluateCode(conf.evalCode, variables)
1415
1416
for variable in list(variables.keys()):
1417
if unsafeVariableNaming(variable) != variable:
1418
entry = variables[variable]
1419
del variables[variable]
1420
variables[unsafeVariableNaming(variable)] = entry
1421
1422
uri = variables["uri"]
1423
cookie = variables["cookie"]
1424
1425
for name, entry in variables.items():
1426
if name != "__builtins__" and originals.get(name, "") != entry:
1427
if isinstance(entry, (int, float, six.string_types, six.binary_type)):
1428
found = False
1429
entry = getUnicode(entry, UNICODE_ENCODING)
1430
1431
if kb.postHint == POST_HINT.MULTIPART:
1432
boundary = "--%s" % re.search(r"boundary=([^\s]+)", contentType).group(1)
1433
if boundary:
1434
parts = post.split(boundary)
1435
match = re.search(r'\bname="%s"' % re.escape(name), post)
1436
if not match and parts:
1437
parts.insert(2, parts[1])
1438
parts[2] = re.sub(r'\bname="[^"]+".*', 'name="%s"' % re.escape(name), parts[2])
1439
for i in xrange(len(parts)):
1440
part = parts[i]
1441
if re.search(r'\bname="%s"' % re.escape(name), part):
1442
match = re.search(r"(?s)\A.+?\r?\n\r?\n", part)
1443
if match:
1444
found = True
1445
first = match.group(0)
1446
second = part[len(first):]
1447
second = re.sub(r"(?s).+?(\r?\n?\-*\Z)", r"%s\g<1>" % re.escape(entry), second)
1448
parts[i] = "%s%s" % (first, second)
1449
post = boundary.join(parts)
1450
1451
elif kb.postHint and re.search(r"\b%s\b" % re.escape(name), post or ""):
1452
if kb.postHint in (POST_HINT.XML, POST_HINT.SOAP):
1453
if re.search(r"<%s\b" % re.escape(name), post):
1454
found = True
1455
post = re.sub(r"(?s)(<%s\b[^>]*>)(.*?)(</%s)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), post)
1456
elif re.search(r"\b%s>" % re.escape(name), post):
1457
found = True
1458
post = re.sub(r"(?s)(\b%s>)(.*?)(</[^<]*\b%s>)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), post)
1459
1460
elif kb.postHint in (POST_HINT.JSON, POST_HINT.JSON_LIKE):
1461
match = re.search(r"['\"]%s['\"]:" % re.escape(name), post)
1462
if match:
1463
quote = match.group(0)[0]
1464
post = post.replace("\\%s" % quote, BOUNDARY_BACKSLASH_MARKER)
1465
match = re.search(r"(%s%s%s:\s*)(\d+|%s[^%s]*%s)" % (quote, re.escape(name), quote, quote, quote, quote), post)
1466
if match:
1467
found = True
1468
post = post.replace(match.group(0), "%s%s" % (match.group(1), entry if entry.isdigit() else "%s%s%s" % (match.group(0)[0], entry, match.group(0)[0])))
1469
post = post.replace(BOUNDARY_BACKSLASH_MARKER, "\\%s" % quote)
1470
1471
regex = r"\b(%s)\b([^\w]+)(\w+)" % re.escape(name)
1472
if not found and re.search(regex, (post or "")):
1473
found = True
1474
post = re.sub(regex, r"\g<1>\g<2>%s" % entry.replace('\\', r'\\'), post)
1475
1476
regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
1477
if not found and re.search(regex, (post or "")):
1478
found = True
1479
post = re.sub(regex, r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), post)
1480
1481
if re.search(regex, (get or "")):
1482
found = True
1483
get = re.sub(regex, r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), get)
1484
1485
if re.search(regex, (query or "")):
1486
found = True
1487
uri = re.sub(regex.replace(r"\A", r"\?"), r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), uri)
1488
1489
regex = r"((\A|%s\s*)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), re.escape(name), re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER))
1490
if re.search(regex, (cookie or "")):
1491
found = True
1492
cookie = re.sub(regex, r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), cookie)
1493
1494
if not found:
1495
if post is not None:
1496
if kb.postHint in (POST_HINT.JSON, POST_HINT.JSON_LIKE):
1497
match = re.search(r"['\"]", post)
1498
if match:
1499
quote = match.group(0)
1500
post = re.sub(r"\}\Z", "%s%s}" % (',' if re.search(r"\w", post) else "", "%s%s%s:%s" % (quote, name, quote, entry if entry.isdigit() else "%s%s%s" % (quote, entry, quote))), post)
1501
else:
1502
post += "%s%s=%s" % (delimiter, name, entry)
1503
elif get is not None:
1504
get += "%s%s=%s" % (delimiter, name, entry)
1505
elif cookie is not None:
1506
cookie += "%s%s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, entry)
1507
1508
if not conf.skipUrlEncode:
1509
get = urlencode(get, limit=True)
1510
1511
if post is not None:
1512
if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE):
1513
post = getattr(post, UNENCODED_ORIGINAL_VALUE)
1514
elif postUrlEncode:
1515
post = urlencode(post, spaceplus=kb.postSpaceToPlus)
1516
1517
if timeBasedCompare and not conf.disableStats:
1518
if len(kb.responseTimes.get(kb.responseTimeMode, [])) < MIN_TIME_RESPONSES:
1519
clearConsoleLine()
1520
1521
kb.responseTimes.setdefault(kb.responseTimeMode, [])
1522
1523
if conf.tor:
1524
warnMsg = "it's highly recommended to avoid usage of switch '--tor' for "
1525
warnMsg += "time-based injections because of inherent high latency time"
1526
singleTimeWarnMessage(warnMsg)
1527
1528
warnMsg = "[%s] [WARNING] %stime-based comparison requires " % (time.strftime("%X"), "(case) " if kb.responseTimeMode else "")
1529
warnMsg += "%s statistical model, please wait" % ("larger" if len(kb.responseTimes) == 1 else "reset of")
1530
dataToStdout(warnMsg)
1531
1532
while len(kb.responseTimes[kb.responseTimeMode]) < MIN_TIME_RESPONSES:
1533
_ = kb.responseTimePayload.replace(RANDOM_INTEGER_MARKER, str(randomInt(6))).replace(RANDOM_STRING_MARKER, randomStr()) if kb.responseTimePayload else kb.responseTimePayload
1534
Connect.queryPage(value=_, content=True, raise404=False)
1535
dataToStdout('.')
1536
1537
dataToStdout(" (done)\n")
1538
1539
elif not kb.testMode:
1540
warnMsg = "it is very important to not stress the network connection "
1541
warnMsg += "during usage of time-based payloads to prevent potential "
1542
warnMsg += "disruptions "
1543
singleTimeWarnMessage(warnMsg)
1544
1545
if not kb.laggingChecked:
1546
kb.laggingChecked = True
1547
1548
deviation = stdev(kb.responseTimes[kb.responseTimeMode])
1549
1550
if deviation is not None and deviation > WARN_TIME_STDEV:
1551
kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE
1552
1553
warnMsg = "considerable lagging has been detected "
1554
warnMsg += "in connection response(s). Please use as high "
1555
warnMsg += "value for option '--time-sec' as possible (e.g. "
1556
warnMsg += "10 or more)"
1557
logger.critical(warnMsg)
1558
1559
if (conf.safeFreq or 0) > 0:
1560
kb.queryCounter += 1
1561
if kb.queryCounter % conf.safeFreq == 0:
1562
if conf.safeUrl:
1563
Connect.getPage(url=conf.safeUrl, post=conf.safePost, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)
1564
elif kb.safeReq:
1565
Connect.getPage(url=kb.safeReq.url, post=kb.safeReq.post, method=kb.safeReq.method, auxHeaders=kb.safeReq.headers)
1566
1567
start = time.time()
1568
1569
if kb.nullConnection and not content and not response and not timeBasedCompare:
1570
noteResponseTime = False
1571
1572
try:
1573
pushValue(kb.pageCompress)
1574
kb.pageCompress = False
1575
1576
if kb.nullConnection == NULLCONNECTION.HEAD:
1577
method = HTTPMETHOD.HEAD
1578
elif kb.nullConnection == NULLCONNECTION.RANGE:
1579
auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1"
1580
1581
_, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))
1582
1583
if headers:
1584
try:
1585
if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and headers.get(HTTP_HEADER.CONTENT_LENGTH):
1586
pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH].split(',')[0])
1587
elif kb.nullConnection == NULLCONNECTION.RANGE and headers.get(HTTP_HEADER.CONTENT_RANGE):
1588
pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])
1589
except ValueError:
1590
pass
1591
finally:
1592
kb.pageCompress = popValue()
1593
1594
if pageLength is None:
1595
try:
1596
page, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
1597
except MemoryError:
1598
page, headers, code = None, None, None
1599
warnMsg = "site returned insanely large response"
1600
if kb.testMode:
1601
warnMsg += " in testing phase. This is a common "
1602
warnMsg += "behavior in custom WAF/IPS solutions"
1603
singleTimeWarnMessage(warnMsg)
1604
1605
if not ignoreSecondOrder:
1606
if conf.secondUrl:
1607
page, headers, code = Connect.getPage(url=conf.secondUrl, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
1608
elif kb.secondReq and IPS_WAF_CHECK_PAYLOAD not in _urllib.parse.unquote(value or ""):
1609
def _(value):
1610
if kb.customInjectionMark in (value or ""):
1611
if payload is None:
1612
value = value.replace(kb.customInjectionMark, "")
1613
else:
1614
try:
1615
value = re.sub(r"\w*%s" % re.escape(kb.customInjectionMark), payload, value)
1616
except re.error:
1617
value = re.sub(r"\w*%s" % re.escape(kb.customInjectionMark), re.escape(payload), value)
1618
return value
1619
page, headers, code = Connect.getPage(url=_(kb.secondReq[0]), post=_(kb.secondReq[2]), method=kb.secondReq[1], cookie=kb.secondReq[3], silent=silent, auxHeaders=dict(auxHeaders, **dict(kb.secondReq[4])), response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
1620
1621
threadData.lastQueryDuration = calculateDeltaSeconds(start)
1622
1623
kb.originalCode = code if kb.originalCode is None else kb.originalCode
1624
kb.originalPage = page if kb.originalPage is None else kb.originalPage
1625
1626
if kb.testMode:
1627
kb.testQueryCount += 1
1628
1629
if timeBasedCompare:
1630
return wasLastResponseDelayed()
1631
elif noteResponseTime:
1632
kb.responseTimes.setdefault(kb.responseTimeMode, [])
1633
kb.responseTimes[kb.responseTimeMode].append(threadData.lastQueryDuration)
1634
if len(kb.responseTimes[kb.responseTimeMode]) > MAX_TIME_RESPONSES:
1635
kb.responseTimes[kb.responseTimeMode] = kb.responseTimes[kb.responseTimeMode][-MAX_TIME_RESPONSES // 2:]
1636
1637
if not response and removeReflection:
1638
page = removeReflectiveValues(page, payload)
1639
1640
kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None
1641
1642
message = extractRegexResult(PERMISSION_DENIED_REGEX, page or "", re.I)
1643
if message:
1644
kb.permissionFlag = True
1645
singleTimeWarnMessage("potential permission problems detected ('%s')" % message)
1646
1647
headers = patchHeaders(headers)
1648
1649
if content or response:
1650
return page, headers, code
1651
1652
if getRatioValue:
1653
return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
1654
else:
1655
return comparison(page, headers, code, getRatioValue, pageLength)
1656
1657
def setHTTPHandlers():  # Cross-referenced function
    """
    Stub for a cross-referenced function: the original marker suggests it
    is rebound to a real implementation elsewhere at runtime (NOTE(review):
    inferred from the "Cross-referenced function" comment — confirm against
    the binding module). Calling this unbound stub always raises
    NotImplementedError.
    """
    raise NotImplementedError
1659
1660