diff --git a/fanficfare/adapters/adapter_fanfictionnet.py b/fanficfare/adapters/adapter_fanfictionnet.py
index f2c45504..3a9f949b 100644
--- a/fanficfare/adapters/adapter_fanfictionnet.py
+++ b/fanficfare/adapters/adapter_fanfictionnet.py
@@ -116,8 +116,13 @@ class FanFictionNetSiteAdapter(BaseSiteAdapter):
                     if not self.getConfig("chrome_cache_path"):
                         raise exceptions.FailedToDownload("FFnet Workaround: chrome_cache_path setting must be set.")
                     self.browser_cache = BrowserCache(self.getConfig("chrome_cache_path"))
-            except PermissionError:
-                raise exceptions.FailedToDownload("Permission to Chrome Cache (%s) denied--Did you quit Chrome?" % self.getConfig("chrome_cache_path"))
+            except (IOError, OSError) as e:
+                # Workaround for PermissionError being py3 only.
+                from errno import EACCES, EPERM, ENOENT
+                if e.errno==EPERM or e.errno==EACCES:
+                    raise exceptions.FailedToDownload("Permission to Chrome Cache (%s) denied--Did you quit Chrome?" % self.getConfig("chrome_cache_path"))
+                else:
+                    raise
             logger.debug("Done making self.browser_cache")
             data = self.browser_cache.get_data(url)
             if data is None:
diff --git a/fanficfare/browsercache/__init__.py b/fanficfare/browsercache/__init__.py
index e874b726..758ba6d8 100644
--- a/fanficfare/browsercache/__init__.py
+++ b/fanficfare/browsercache/__init__.py
@@ -9,7 +9,6 @@ class BrowserCache:
     def __init__(self, cache_dir=None):
         """Constructor for BrowserCache"""
         # import of child classes have to be inside the def to avoid circular import error
-        browser_cache_class: BaseBrowserCache
         for browser_cache_class in [SimpleCache, ChromeDiskCache]:
             self.browser_cache = browser_cache_class.new_browser_cache(cache_dir)
             if self.browser_cache is not None:
diff --git a/fanficfare/browsercache/chromediskcache.py b/fanficfare/browsercache/chromediskcache.py
index 4b1946b8..71145e22 100644
--- a/fanficfare/browsercache/chromediskcache.py
+++ b/fanficfare/browsercache/chromediskcache.py
@@ -18,7 +18,7 @@ class ChromeDiskCache(BaseBrowserCache):

     def __init__(self, cache_dir=None):
         """Constructor for ChromeDiskCache"""
-        super().__init__(cache_dir)
+        BaseBrowserCache.__init__(self,cache_dir)
         if not self.is_cache_dir(cache_dir):
             raise ChromeDiskCacheException("Directory does not contain a Chrome Disk Cache: '%s'" % cache_dir)
         self.chromagnon_cache = ChromeCache(cache_dir)
diff --git a/fanficfare/browsercache/simplecache.py b/fanficfare/browsercache/simplecache.py
index 198ea5c7..ff33cd41 100644
--- a/fanficfare/browsercache/simplecache.py
+++ b/fanficfare/browsercache/simplecache.py
@@ -4,7 +4,6 @@ import hashlib
 import gzip
 import zlib
 import glob
-from typing import cast, Tuple

 from . import BaseBrowserCache, BrowserCacheException
 import logging
@@ -39,7 +38,7 @@ class SimpleCache(BaseBrowserCache):

     def __init__(self, cache_dir=None):
         """Constructor for SimpleCache"""
-        super().__init__(cache_dir)
+        BaseBrowserCache.__init__(self,cache_dir)
         ## already called from parent.new_browser_cache()
         # if not self.is_cache_dir(cache_dir):
         #     raise SimpleCacheException("Directory does not contain a Chrome Simple Cache: '%s'" % cache_dir)
@@ -157,8 +156,8 @@ def _get_headers(path):
     # parse the raw bytes into a HttpHeader structure:
     # It is a series of null terminated strings, first is status code,e.g., "HTTP/1.1 200"
     # the rest are name:value pairs used to populate the headers dict.
-    strings: list[str] = entry_file.read(header_size).decode('utf-8').split('\0')
-    headers = dict(cast(Tuple[str, str], s.split(':', 1)) for s in strings[1:] if ':' in s)
+    strings = entry_file.read(header_size).decode('utf-8').split('\0')
+    headers = dict(s.split(':', 1) for s in strings[1:] if ':' in s)
     return headers
diff --git a/fanficfare/chromagnon/cacheParse.py b/fanficfare/chromagnon/cacheParse.py
index 0d35b495..867b9409 100644
--- a/fanficfare/chromagnon/cacheParse.py
+++ b/fanficfare/chromagnon/cacheParse.py
@@ -73,7 +73,8 @@ from .cacheAddress import CacheAddress
 from .cacheBlock import CacheBlock
 from .cacheData import CacheData
 from .cacheEntry import CacheEntry
-from six.moves import range
+from ..six.moves import range
+from ..six import ensure_binary, ensure_text

 class ChromeCache(object):
     def __init__(self,path):
@@ -85,7 +86,7 @@ class ChromeCache(object):
             raise Exception("Invalid Index File")

     def get_cache_entry(self,url):
-        url = bytes(url,'utf8')
+        url = ensure_binary(url,'utf8')
         # Compute the key and seeking to it
         # print("url:%s"%url)
         hash = SuperFastHash.superFastHash(url)
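
Note on the first hunk: PermissionError exists only on Python 3, so the patch catches IOError/OSError and inspects errno instead. A minimal standalone sketch of that pattern follows; it is not part of the patch, and the helper name and path argument are made up for illustration.

from errno import EACCES, EPERM

def open_cache_index(path):
    # Hypothetical helper, illustration only: same errno check as the patch.
    try:
        return open(path, 'rb')
    except (IOError, OSError) as e:
        # On py3 a permission failure arrives as PermissionError (a subclass of
        # OSError); on py2 it arrives as IOError/OSError, so test errno on both.
        if e.errno in (EACCES, EPERM):
            raise RuntimeError("Permission to %s denied--did you quit the browser?" % path)
        raise

On Python 3 alone, "except PermissionError:" would cover the same cases, since PermissionError corresponds to OSError with errno EACCES or EPERM.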