diff --git a/lib/core/settings.py b/lib/core/settings.py
index 736e7d1db..43fc75d3b 100644
--- a/lib/core/settings.py
+++ b/lib/core/settings.py
@@ -18,7 +18,7 @@ from lib.core.enums import OS
 from thirdparty.six import unichr as _unichr
 
 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.3.11.23"
+VERSION = "1.3.11.24"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
diff --git a/lib/core/threads.py b/lib/core/threads.py
index 7b860d185..8f163a637 100644
--- a/lib/core/threads.py
+++ b/lib/core/threads.py
@@ -123,7 +123,7 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
     kb.threadException = False
     kb.technique = ThreadData.technique
 
-    if threadChoice and numThreads == 1 and not (kb.injection.data and not any(_ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in kb.injection.data)):
+    if threadChoice and conf.threads == numThreads == 1 and not (kb.injection.data and not any(_ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in kb.injection.data)):
         while True:
             message = "please enter number of threads? [Enter for %d (current)] " % numThreads
             choice = readInput(message, default=str(numThreads))
diff --git a/lib/request/connect.py b/lib/request/connect.py
index 31666e3dc..3594e0d33 100644
--- a/lib/request/connect.py
+++ b/lib/request/connect.py
@@ -698,7 +698,7 @@ class Connect(object):
                     else:
                         raise SqlmapConnectionException(warnMsg)
                 else:
-                    debugMsg = "got HTTP error code: %d (%s)" % (code, status)
+                    debugMsg = "got HTTP error code: %d ('%s')" % (code, status)
                     logger.debug(debugMsg)
 
         except (_urllib.error.URLError, socket.error, socket.timeout, _http_client.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError, ValueError, OverflowError):
diff --git a/lib/utils/crawler.py b/lib/utils/crawler.py
index 0d9530359..5079c3381 100644
--- a/lib/utils/crawler.py
+++ b/lib/utils/crawler.py
@@ -117,7 +117,7 @@ def crawl(target):
                     if (extractRegexResult(r"\A[^?]+\.(?P<result>\w+)(\?|\Z)", url) or "").lower() not in CRAWL_EXCLUDE_EXTENSIONS:
                         with kb.locks.value:
                             threadData.shared.deeper.add(url)
-                            if re.search(r"(.*?)\?(.+)", url) and not re.search(r"\?(v=)?\d+\Z", url):
+                            if re.search(r"(.*?)\?(.+)", url) and not re.search(r"\?(v=)?\d+\Z", url) and not re.search(r"(?i)\.(js|css)(\?|\Z)", url):
                                 threadData.shared.value.add(url)
             except UnicodeEncodeError:  # for non-HTML files
                 pass