Minor improvements in HashDB

This commit is contained in:
Miroslav Stampar 2025-12-30 12:14:32 +01:00
parent bf2d3a5315
commit 1330198eab
5 changed files with 34 additions and 29 deletions

View file

@@ -188,12 +188,12 @@ c4bfb493a03caf84dd362aec7c248097841de804b7413d0e1ecb8a90c8550bc0 lib/core/readl
d1bd70c1a55858495c727fbec91e30af267459c8f64d50fabf9e4ee2c007e920 lib/core/replication.py
1d0f80b0193ac5204527bfab4bde1a7aee0f693fd008e86b4b29f606d1ef94f3 lib/core/revision.py
d2eb8e4b05ac93551272b3d4abfaf5b9f2d3ac92499a7704c16ed0b4f200db38 lib/core/session.py
c42265c888448e115be0ea6ba6fdc86c86cbd00cdbc3a635c21b2a06949920d6 lib/core/settings.py
08714a34dc7fcaab4baef0ee9fad14e76b726d5ac87284d8e9b3d9a818d80090 lib/core/settings.py
1c5eab9494eb969bc9ce118a2ea6954690c6851cbe54c18373c723b99734bf09 lib/core/shell.py
4eea6dcf023e41e3c64b210cb5c2efc7ca893b727f5e49d9c924f076bb224053 lib/core/subprocessng.py
cdd352e1331c6b535e780f6edea79465cb55af53aa2114dcea0e8bf382e56d1a lib/core/target.py
6cf11d8b00fa761046686437fe90565e708809f793e88a3f02527d0e49c4d2a8 lib/core/testing.py
2a179b7601026a8da092271b30ad353cdb6decd658e2614fa51983aaf6dd80e7 lib/core/threads.py
f113732e85962a2522b7ab771295169d63d35b0ee8f1fc455526048d3994d94e lib/core/threads.py
6f61e7946e368ee1450c301aaf5a26381a8ae31fc8bffa28afc9383e8b1fbc3f lib/core/unescaper.py
8919863be7a86f46d2c41bd30c0114a55a55c5931be48e3cfc66dfa96b7109c8 lib/core/update.py
cba481f8c79f4a75bd147b9eb5a1e6e61d70422fceadd12494b1dbaa4f1d27f4 lib/core/wordlist.py
@@ -247,7 +247,7 @@ af67d25e8c16b429a5b471d3c629dc1da262262320bf7cd68465d151c02def16 lib/utils/brut
56b93ba38f127929346f54aa75af0db5f46f9502b16acfe0d674a209de6cad2d lib/utils/deps.py
3aca7632d53ab2569ddef876a1b90f244640a53e19b304c77745f8ddb15e6437 lib/utils/getch.py
4979120bbbc030eaef97147ee9d7d564d9683989059b59be317153cdaa23d85b lib/utils/har.py
00135cf61f1cfe79d7be14c526f84a841ad22e736db04e4fe087baeb4c22dc0d lib/utils/hashdb.py
70231961e1d5888efa307552457fe3bc5fdc15e8c93206c1fa05f98e75e5ae5d lib/utils/hashdb.py
8c9caffbd821ad9547c27095c8e55c398ea743b2e44d04b3572e2670389ccf5b lib/utils/hash.py
ba862f0c96b1d39797fb21974599e09690d312b17a85e6639bee9d1db510f543 lib/utils/httpd.py
4608f21a4333c162ab3c266c903fda4793cc5834de30d06affe9b7566dd09811 lib/utils/__init__.py
@@ -477,7 +477,7 @@ cbc7684de872fac4baeabd1fce3938bc771316c36e54d69ac6a301e8a99f07b2 plugins/generi
535ab6ac8b8441a3758cee86df3e68abec8b43eee54e32777967252057915acc sqlmapapi.py
168309215af7dd5b0b71070e1770e72f1cbb29a3d8025143fb8aa0b88cd56b62 sqlmapapi.yaml
a40607ce164eb2d21865288d24b863edb1c734b56db857e130ac1aef961c80b9 sqlmap.conf
1beb0711d15e38956759fbffa5331bde763c568a1baa8e32a04ebe5bc7a27e87 sqlmap.py
c3a4c520df0a3396ed9e0f88fea0c9e0f420f779eff7e3d213603bd3f250f927 sqlmap.py
82caac95182ac5cae02eb7d8a2dc07e71389aeae6b838d3d3f402c9597eb086a tamper/0eunion.py
bc8f5e638578919e4e75a5b01a84b47456bac0fd540e600975a52408a3433460 tamper/apostrophemask.py
c9c3d71f11de0140906d7b4f24fadb9926dc8eaf5adab864f8106275f05526ce tamper/apostrophenullencode.py

View file

@@ -19,7 +19,7 @@ from lib.core.enums import OS
from thirdparty import six
# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
VERSION = "1.9.12.32"
VERSION = "1.9.12.33"
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
@@ -703,8 +703,11 @@ FORCE_COOKIE_EXPIRATION_TIME = "9999999999"
# Github OAuth token used for creating an automatic Issue for unhandled exceptions
GITHUB_REPORT_OAUTH_TOKEN = "wxqc7vTeW8ohIcX+1wK55Mnql2Ex9cP+2s1dqTr/mjlZJVfLnq24fMAi08v5vRvOmuhVZQdOT/lhIRovWvIJrdECD1ud8VMPWpxY+NmjHoEx+VLK1/vCAUBwJe"
# Skip unforced HashDB flush requests below the threshold number of cached items
HASHDB_FLUSH_THRESHOLD = 32
# Flush HashDB threshold number of cached items
HASHDB_FLUSH_THRESHOLD_ITEMS = 200
# Flush HashDB threshold "dirty" time
HASHDB_FLUSH_THRESHOLD_TIME = 5
# Number of retries for unsuccessful HashDB flush attempts
HASHDB_FLUSH_RETRIES = 3

View file

@@ -255,7 +255,7 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
pass
if conf.get("hashDB"):
conf.hashDB.flush(True)
conf.hashDB.flush()
if cleanupFunction:
cleanupFunction()

View file

@@ -22,7 +22,8 @@ from lib.core.data import logger
from lib.core.exception import SqlmapConnectionException
from lib.core.settings import HASHDB_END_TRANSACTION_RETRIES
from lib.core.settings import HASHDB_FLUSH_RETRIES
from lib.core.settings import HASHDB_FLUSH_THRESHOLD
from lib.core.settings import HASHDB_FLUSH_THRESHOLD_ITEMS
from lib.core.settings import HASHDB_FLUSH_THRESHOLD_TIME
from lib.core.settings import HASHDB_RETRIEVE_RETRIES
from lib.core.threads import getCurrentThreadData
from lib.core.threads import getCurrentThreadName
@@ -34,15 +35,17 @@ class HashDB(object):
self._write_cache = {}
self._cache_lock = threading.Lock()
self._connections = []
self._last_flush_time = time.time()
def _get_cursor(self):
threadData = getCurrentThreadData()
if threadData.hashDBCursor is None:
try:
connection = sqlite3.connect(self.filepath, timeout=3, isolation_level=None)
connection = sqlite3.connect(self.filepath, timeout=3, isolation_level=None, check_same_thread=False)
self._connections.append(connection)
threadData.hashDBCursor = connection.cursor()
threadData.hashDBCursor.execute("PRAGMA journal_mode=WAL")
threadData.hashDBCursor.execute("CREATE TABLE IF NOT EXISTS storage (id INTEGER PRIMARY KEY, value TEXT)")
connection.commit()
except Exception as ex:
@@ -86,7 +89,7 @@ class HashDB(object):
def retrieve(self, key, unserialize=False):
retVal = None
if key and (self._write_cache or os.path.isfile(self.filepath)):
if key and (self._write_cache or self._connections or os.path.isfile(self.filepath)):
hash_ = HashDB.hashKey(key)
retVal = self._write_cache.get(hash_)
if not retVal:
@@ -123,28 +126,26 @@ class HashDB(object):
def write(self, key, value, serialize=False):
if key:
hash_ = HashDB.hashKey(key)
self._cache_lock.acquire()
self._write_cache[hash_] = getUnicode(value) if not serialize else serializeObject(value)
self._cache_lock.release()
with self._cache_lock:
self._write_cache[hash_] = getUnicode(value) if not serialize else serializeObject(value)
cache_size = len(self._write_cache)
time_since_flush = time.time() - self._last_flush_time
if getCurrentThreadName() in ('0', "MainThread"):
self.flush()
if cache_size >= HASHDB_FLUSH_THRESHOLD_ITEMS or time_since_flush >= HASHDB_FLUSH_THRESHOLD_TIME:
self.flush()
def flush(self, forced=False):
if not self._write_cache:
return
def flush(self):
with self._cache_lock:
if not self._write_cache:
return
if not forced and len(self._write_cache) < HASHDB_FLUSH_THRESHOLD:
return
self._cache_lock.acquire()
_ = self._write_cache
self._write_cache = {}
self._cache_lock.release()
flush_cache = self._write_cache
self._write_cache = {}
self._last_flush_time = time.time()
try:
self.beginTransaction()
for hash_, value in _.items():
for hash_, value in flush_cache.items():
retries = 0
while True:
try:
@@ -160,7 +161,8 @@ class HashDB(object):
logger.debug(debugMsg)
break
if retries == 0:
# NOTE: skipping the retries == 0 for graceful resolution of multi-threaded runs
if retries == 1:
warnMsg = "there has been a problem while writing to "
warnMsg += "the session file ('%s')" % getSafeExString(ex)
logger.warning(warnMsg)

View file

@@ -588,7 +588,7 @@ def main():
pass
if conf.get("hashDB"):
conf.hashDB.flush(True)
conf.hashDB.flush()
conf.hashDB.close() # NOTE: because of PyPy
if conf.get("harFile"):