mirror of https://github.com/JimmXinu/FanFicFare.git
synced 2026-01-07 16:42:51 +01:00
Partial py2 compat--stopped because brotlipython is also py3 only.
This commit is contained in:
parent 2b12dc7054
commit df4aabc517
5 changed files with 14 additions and 10 deletions
@@ -116,8 +116,13 @@ class FanFictionNetSiteAdapter(BaseSiteAdapter):
             if not self.getConfig("chrome_cache_path"):
                 raise exceptions.FailedToDownload("FFnet Workaround: chrome_cache_path setting must be set.")
             self.browser_cache = BrowserCache(self.getConfig("chrome_cache_path"))
-        except PermissionError:
-            raise exceptions.FailedToDownload("Permission to Chrome Cache (%s) denied--Did you quit Chrome?" % self.getConfig("chrome_cache_path"))
+        except (IOError, OSError) as e:
+            # Workaround for PermissionError being py3 only.
+            from errno import EACCES, EPERM, ENOENT
+            if e.errno==EPERM or e.errno==EACCES:
+                raise exceptions.FailedToDownload("Permission to Chrome Cache (%s) denied--Did you quit Chrome?" % self.getConfig("chrome_cache_path"))
+            else:
+                raise
         logger.debug("Done making self.browser_cache")
         data = self.browser_cache.get_data(url)
         if data is None:
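The hunk above is the core of the py2 workaround: PermissionError is a py3-only builtin, so the code catches the older IOError/OSError and inspects errno instead. This keeps working on py3 too, since PermissionError subclasses OSError. A minimal standalone sketch of the same pattern, with illustrative names not taken from the repo:

    import errno

    def read_cache_file(path):
        try:
            with open(path, 'rb') as f:
                return f.read()
        except (IOError, OSError) as e:
            # On py2 a permission failure arrives as IOError/OSError with
            # errno set; EPERM and EACCES are the "denied" values.
            if e.errno in (errno.EPERM, errno.EACCES):
                raise RuntimeError("Permission denied: %s" % path)
            raise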
@@ -9,7 +9,6 @@ class BrowserCache:
     def __init__(self, cache_dir=None):
         """Constructor for BrowserCache"""
         # import of child classes have to be inside the def to avoid circular import error
-        browser_cache_class: BaseBrowserCache
         for browser_cache_class in [SimpleCache, ChromeDiskCache]:
             self.browser_cache = browser_cache_class.new_browser_cache(cache_dir)
             if self.browser_cache is not None:
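The deleted line is a bare variable annotation, syntax that only exists on py3.6+ and is a SyntaxError on py2. If the hint is still wanted, PEP 484 type comments are the py2-compatible spelling; a generic sketch, not part of this commit:

    values = [1, 2, 3]
    # py3.6+ only (SyntaxError on py2):   total: int = 0
    total = 0  # type: int
    for v in values:  # type: int
        total += v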
@@ -18,7 +18,7 @@ class ChromeDiskCache(BaseBrowserCache):
 
     def __init__(self, cache_dir=None):
         """Constructor for ChromeDiskCache"""
-        super().__init__(cache_dir)
+        BaseBrowserCache.__init__(self,cache_dir)
         if not self.is_cache_dir(cache_dir):
             raise ChromeDiskCacheException("Directory does not contain a Chrome Disk Cache: '%s'" % cache_dir)
         self.chromagnon_cache = ChromeCache(cache_dir)
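This hunk and the SimpleCache hunk below make the same substitution: zero-argument super() is py3-only syntax. A sketch of the equivalent spellings, with throwaway class names:

    class Base(object):
        def __init__(self, cache_dir=None):
            self.cache_dir = cache_dir

    class Child(Base):
        def __init__(self, cache_dir=None):
            # py3 only:  super().__init__(cache_dir)
            # works on py2 and py3: call the base class explicitly,
            Base.__init__(self, cache_dir)
            # or use the two-argument form:
            # super(Child, self).__init__(cache_dir)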
@@ -4,7 +4,6 @@ import hashlib
 import gzip
 import zlib
 import glob
-from typing import cast, Tuple
 from . import BaseBrowserCache, BrowserCacheException
 
 import logging
@@ -39,7 +38,7 @@ class SimpleCache(BaseBrowserCache):
 
     def __init__(self, cache_dir=None):
         """Constructor for SimpleCache"""
-        super().__init__(cache_dir)
+        BaseBrowserCache.__init__(self,cache_dir)
         ## already called from parent.new_browser_cache()
         # if not self.is_cache_dir(cache_dir):
         #     raise SimpleCacheException("Directory does not contain a Chrome Simple Cache: '%s'" % cache_dir)
@@ -157,8 +156,8 @@ def _get_headers(path):
     # parse the raw bytes into a HttpHeader structure:
     # It is a series of null terminated strings, first is status code,e.g., "HTTP/1.1 200"
     # the rest are name:value pairs used to populate the headers dict.
-    strings: list[str] = entry_file.read(header_size).decode('utf-8').split('\0')
-    headers = dict(cast(Tuple[str, str], s.split(':', 1)) for s in strings[1:] if ':' in s)
+    strings = entry_file.read(header_size).decode('utf-8').split('\0')
+    headers = dict(s.split(':', 1) for s in strings[1:] if ':' in s)
     return headers
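The deleted lines used annotated-assignment syntax and typing.cast, both py3-only here; the replacements do the same work untyped. For reference, what that parsing does: the Simple Cache stores HTTP headers as one blob of null-separated strings, status line first. A standalone sketch with made-up bytes:

    raw = b'HTTP/1.1 200\x00content-type:text/html\x00content-encoding:gzip\x00'
    strings = raw.decode('utf-8').split('\0')
    # strings[0] is the status line; the rest are name:value pairs
    headers = dict(s.split(':', 1) for s in strings[1:] if ':' in s)
    print(headers)  # {'content-type': 'text/html', 'content-encoding': 'gzip'}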
@@ -73,7 +73,8 @@ from .cacheAddress import CacheAddress
 from .cacheBlock import CacheBlock
 from .cacheData import CacheData
 from .cacheEntry import CacheEntry
-from six.moves import range
+from ..six.moves import range
+from ..six import ensure_binary, ensure_text
 
 class ChromeCache(object):
     def __init__(self,path):
@@ -85,7 +86,7 @@ class ChromeCache(object):
         raise Exception("Invalid Index File")
 
     def get_cache_entry(self,url):
-        url = bytes(url,'utf8')
+        url = ensure_binary(url,'utf8')
         # Compute the key and seeking to it
         # print("url:%s"%url)
         hash = SuperFastHash.superFastHash(url)
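bytes(url, 'utf8') is itself py3-only: on py2, bytes is an alias for str and its constructor takes no encoding argument. six.ensure_binary gives the same result on both interpreters, and the switch from six to ..six apparently points the import at the copy of six vendored inside the package. A sketch of the call, with a made-up URL:

    from six import ensure_binary

    url = u'https://www.fanfiction.net/s/12345'
    key = ensure_binary(url, 'utf8')           # text -> UTF-8-encoded bytes
    assert isinstance(key, bytes)
    assert ensure_binary(key, 'utf8') == key   # bytes pass through unchanged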