From 7b97439fcded2ff19cf8160399bca5c381dffde0 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 25 Jul 2018 15:06:06 -0500
Subject: [PATCH 001/120] Working towards python 2.7 & 3 cross compatibility.
---
fanficfare/adapters/__init__.py | 309 +++++++++---------
fanficfare/cli3.py | 553 ++++++++++++++++++++++++++++++++
fanficfare/configurable.py | 36 ++-
fanficfare/gziphttp.py | 6 +-
4 files changed, 731 insertions(+), 173 deletions(-)
create mode 100644 fanficfare/cli3.py
diff --git a/fanficfare/adapters/__init__.py b/fanficfare/adapters/__init__.py
index 78e47c09..cae72b6f 100644
--- a/fanficfare/adapters/__init__.py
+++ b/fanficfare/adapters/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,168 +15,171 @@
# limitations under the License.
#
+from __future__ import absolute_import
import os, re, sys, glob, types
from os.path import dirname, basename, normpath
import logging
-import urlparse as up
+from six.moves.urllib.parse import urlparse as up
logger = logging.getLogger(__name__)
+print(sys.path)
+from fanficfare.configurable import Configuration
from .. import exceptions as exceptions
-from ..configurable import Configuration
+
## must import each adapter here.
-import adapter_test1
-import adapter_fanfictionnet
-import adapter_fanficcastletvnet
-import adapter_fictionalleyorg
-import adapter_fictionpresscom
-import adapter_ficwadcom
-import adapter_fimfictionnet
-import adapter_mediaminerorg
-import adapter_potionsandsnitches
-import adapter_tenhawkpresentscom
-import adapter_adastrafanficcom
-import adapter_tthfanficorg
-import adapter_twilightednet
-import adapter_whoficcom
-import adapter_siyecouk
-import adapter_archiveofourownorg
-import adapter_ficbooknet
-import adapter_nfacommunitycom
-import adapter_midnightwhispers
-import adapter_ksarchivecom
-import adapter_archiveskyehawkecom
-import adapter_squidgeorgpeja
-import adapter_libraryofmoriacom
-import adapter_wraithbaitcom
-import adapter_dramioneorg
-import adapter_ashwindersycophanthexcom
-import adapter_chaossycophanthexcom
-import adapter_erosnsapphosycophanthexcom
-import adapter_lumossycophanthexcom
-import adapter_occlumencysycophanthexcom
-import adapter_phoenixsongnet
-import adapter_walkingtheplankorg
-import adapter_dokugacom
-import adapter_iketernalnet
-import adapter_storiesofardacom
-import adapter_destinysgatewaycom
-import adapter_ncisfictioncom
-import adapter_fanfiktionde
-import adapter_ponyfictionarchivenet
-import adapter_ncisficcom
-import adapter_nationallibrarynet
-import adapter_themasquenet
-import adapter_pretendercentrecom
-import adapter_darksolaceorg
-import adapter_finestoriescom
-import adapter_hpfanficarchivecom
-import adapter_twilightarchivescom
-import adapter_nhamagicalworldsus
-import adapter_hlfictionnet
-import adapter_dracoandginnycom
-import adapter_scarvesandcoffeenet
-import adapter_thepetulantpoetesscom
-import adapter_wolverineandroguecom
-import adapter_merlinficdtwinscouk
-import adapter_thehookupzonenet
-import adapter_bloodtiesfancom
-import adapter_qafficcom
-import adapter_efpfanficnet
-import adapter_potterficscom
-import adapter_efictionestelielde
-import adapter_imagineeficcom
-import adapter_asr3slashzoneorg
-import adapter_potterheadsanonymouscom
-import adapter_fictionpadcom
-import adapter_storiesonlinenet
-import adapter_trekiverseorg
-import adapter_literotica
-import adapter_voracity2eficcom
-import adapter_spikeluvercom
-import adapter_bloodshedversecom
-import adapter_nocturnallightnet
-import adapter_fanfichu
-import adapter_fictionmaniatv
-import adapter_tolkienfanfiction
-import adapter_themaplebookshelf
-import adapter_fannation
-import adapter_sheppardweircom
-import adapter_samandjacknet
-import adapter_csiforensicscom
-import adapter_lotrfanfictioncom
-import adapter_fhsarchivecom
-import adapter_fanfictionjunkiesde
-import adapter_tgstorytimecom
-import adapter_itcouldhappennet
-import adapter_forumsspacebattlescom
-import adapter_forumssufficientvelocitycom
-import adapter_forumquestionablequestingcom
-import adapter_ninelivesarchivecom
-import adapter_masseffect2in
-import adapter_quotevcom
-import adapter_mcstoriescom
-import adapter_buffygilescom
-import adapter_andromedawebcom
-import adapter_artemisfowlcom
-import adapter_naiceanilmenet
-import adapter_deepinmysoulnet
-import adapter_kiarepositorymujajinet
-import adapter_adultfanfictionorg
-import adapter_fictionhuntcom
-import adapter_royalroadl
-import adapter_chosentwofanficcom
-import adapter_bdsmlibrarycom
-import adapter_asexstoriescom
-import adapter_gluttonyfictioncom
-import adapter_valentchambercom
-import adapter_looselugscom
-import adapter_wwwgiantessworldnet
-import adapter_lotrgficcom
-import adapter_tomparisdormcom
-import adapter_writingwhimsicalwanderingsnet
-import adapter_sugarquillnet
-import adapter_wwwarea52hkhnet
-import adapter_starslibrarynet
-import adapter_fanficauthorsnet
-import adapter_fireflyfansnet
-import adapter_fireflypopulliorg
-import adapter_sebklainenet
-import adapter_shriftweborgbfa
-import adapter_trekfanfictionnet
-import adapter_wuxiaworldcom
-import adapter_wwwlushstoriescom
-import adapter_wwwutopiastoriescom
-import adapter_sinfuldreamscomunicornfic
-import adapter_sinfuldreamscomwhisperedmuse
-import adapter_sinfuldreamscomwickedtemptation
-import adapter_asianfanficscom
-import adapter_webnovelcom
-import adapter_deandamagecom
-import adapter_imrightbehindyoucom
-import adapter_mttjustoncenet
-import adapter_narutoficorg
-import adapter_starskyhutcharchivenet
-import adapter_swordborderlineangelcom
-import adapter_tasteofpoisoninkubationnet
-import adapter_thebrokenworldorg
-import adapter_thedelphicexpansecom
-import adapter_thundercatsfansorg
-import adapter_unknowableroomorg
-import adapter_www13hoursorg
-import adapter_wwwaneroticstorycom
-import adapter_gravitytalescom
-import adapter_lcfanficcom
-import adapter_noveltrovecom
-import adapter_inkbunnynet
-import adapter_alternatehistorycom
-import adapter_wattpadcom
-import adapter_lightnovelgatecom
-import adapter_wwwnovelallcom
-import adapter_wuxiaworldco
-import adapter_harrypotterfanfictioncom
+from . import adapter_test1
+# import adapter_fanfictionnet
+# import adapter_fanficcastletvnet
+# import adapter_fictionalleyorg
+# import adapter_fictionpresscom
+# import adapter_ficwadcom
+# import adapter_fimfictionnet
+# import adapter_mediaminerorg
+# import adapter_potionsandsnitches
+# import adapter_tenhawkpresentscom
+# import adapter_adastrafanficcom
+# import adapter_tthfanficorg
+# import adapter_twilightednet
+# import adapter_whoficcom
+# import adapter_siyecouk
+# import adapter_archiveofourownorg
+# import adapter_ficbooknet
+# import adapter_nfacommunitycom
+# import adapter_midnightwhispers
+# import adapter_ksarchivecom
+# import adapter_archiveskyehawkecom
+# import adapter_squidgeorgpeja
+# import adapter_libraryofmoriacom
+# import adapter_wraithbaitcom
+# import adapter_dramioneorg
+# import adapter_ashwindersycophanthexcom
+# import adapter_chaossycophanthexcom
+# import adapter_erosnsapphosycophanthexcom
+# import adapter_lumossycophanthexcom
+# import adapter_occlumencysycophanthexcom
+# import adapter_phoenixsongnet
+# import adapter_walkingtheplankorg
+# import adapter_dokugacom
+# import adapter_iketernalnet
+# import adapter_storiesofardacom
+# import adapter_destinysgatewaycom
+# import adapter_ncisfictioncom
+# import adapter_fanfiktionde
+# import adapter_ponyfictionarchivenet
+# import adapter_ncisficcom
+# import adapter_nationallibrarynet
+# import adapter_themasquenet
+# import adapter_pretendercentrecom
+# import adapter_darksolaceorg
+# import adapter_finestoriescom
+# import adapter_hpfanficarchivecom
+# import adapter_twilightarchivescom
+# import adapter_nhamagicalworldsus
+# import adapter_hlfictionnet
+# import adapter_dracoandginnycom
+# import adapter_scarvesandcoffeenet
+# import adapter_thepetulantpoetesscom
+# import adapter_wolverineandroguecom
+# import adapter_merlinficdtwinscouk
+# import adapter_thehookupzonenet
+# import adapter_bloodtiesfancom
+# import adapter_qafficcom
+# import adapter_efpfanficnet
+# import adapter_potterficscom
+# import adapter_efictionestelielde
+# import adapter_imagineeficcom
+# import adapter_asr3slashzoneorg
+# import adapter_potterheadsanonymouscom
+# import adapter_fictionpadcom
+# import adapter_storiesonlinenet
+# import adapter_trekiverseorg
+# import adapter_literotica
+# import adapter_voracity2eficcom
+# import adapter_spikeluvercom
+# import adapter_bloodshedversecom
+# import adapter_nocturnallightnet
+# import adapter_fanfichu
+# import adapter_fictionmaniatv
+# import adapter_tolkienfanfiction
+# import adapter_themaplebookshelf
+# import adapter_fannation
+# import adapter_sheppardweircom
+# import adapter_samandjacknet
+# import adapter_csiforensicscom
+# import adapter_lotrfanfictioncom
+# import adapter_fhsarchivecom
+# import adapter_fanfictionjunkiesde
+# import adapter_tgstorytimecom
+# import adapter_itcouldhappennet
+# import adapter_forumsspacebattlescom
+# import adapter_forumssufficientvelocitycom
+# import adapter_forumquestionablequestingcom
+# import adapter_ninelivesarchivecom
+# import adapter_masseffect2in
+# import adapter_quotevcom
+# import adapter_mcstoriescom
+# import adapter_buffygilescom
+# import adapter_andromedawebcom
+# import adapter_artemisfowlcom
+# import adapter_naiceanilmenet
+# import adapter_deepinmysoulnet
+# import adapter_kiarepositorymujajinet
+# import adapter_adultfanfictionorg
+# import adapter_fictionhuntcom
+# import adapter_royalroadl
+# import adapter_chosentwofanficcom
+# import adapter_bdsmlibrarycom
+# import adapter_asexstoriescom
+# import adapter_gluttonyfictioncom
+# import adapter_valentchambercom
+# import adapter_looselugscom
+# import adapter_wwwgiantessworldnet
+# import adapter_lotrgficcom
+# import adapter_tomparisdormcom
+# import adapter_writingwhimsicalwanderingsnet
+# import adapter_sugarquillnet
+# import adapter_wwwarea52hkhnet
+# import adapter_starslibrarynet
+# import adapter_fanficauthorsnet
+# import adapter_fireflyfansnet
+# import adapter_fireflypopulliorg
+# import adapter_sebklainenet
+# import adapter_shriftweborgbfa
+# import adapter_trekfanfictionnet
+# import adapter_wuxiaworldcom
+# import adapter_wwwlushstoriescom
+# import adapter_wwwutopiastoriescom
+# import adapter_sinfuldreamscomunicornfic
+# import adapter_sinfuldreamscomwhisperedmuse
+# import adapter_sinfuldreamscomwickedtemptation
+# import adapter_asianfanficscom
+# import adapter_webnovelcom
+# import adapter_deandamagecom
+# import adapter_imrightbehindyoucom
+# import adapter_mttjustoncenet
+# import adapter_narutoficorg
+# import adapter_starskyhutcharchivenet
+# import adapter_swordborderlineangelcom
+# import adapter_tasteofpoisoninkubationnet
+# import adapter_thebrokenworldorg
+# import adapter_thedelphicexpansecom
+# import adapter_thundercatsfansorg
+# import adapter_unknowableroomorg
+# import adapter_www13hoursorg
+# import adapter_wwwaneroticstorycom
+# import adapter_gravitytalescom
+# import adapter_lcfanficcom
+# import adapter_noveltrovecom
+# import adapter_inkbunnynet
+# import adapter_alternatehistorycom
+# import adapter_wattpadcom
+# import adapter_lightnovelgatecom
+# import adapter_wwwnovelallcom
+# import adapter_wuxiaworldco
+# import adapter_harrypotterfanfictioncom
## This bit of complexity allows adapters to be added by just adding
## importing. It eliminates the long if/else clauses we used to need
diff --git a/fanficfare/cli3.py b/fanficfare/cli3.py
new file mode 100644
index 00000000..f7ea7dc1
--- /dev/null
+++ b/fanficfare/cli3.py
@@ -0,0 +1,553 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2015 Fanficdownloader team, 2018 FanFicFare team
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from optparse import OptionParser, SUPPRESS_HELP
+from os.path import expanduser, join, dirname
+from os import access, R_OK
+from subprocess import call
+from six import StringIO
+import six.moves.configparser as configparser
+import getpass
+import logging
+import pprint
+import string
+import sys
+
+import pickle
+import http.cookiejar as cl
+
+version="2.27.8"
+
+if sys.version_info < (3,0):
+ print('this program requires python 3 or newer.')
+ sys.exit(1)
+
+# if sys.version_info >= (2, 7):
+# # suppresses default logger. logging is setup in fanficfare/__init__.py so it works in calibre, too.
+# rootlogger = logging.getlogger()
+# loghandler = logging.nullhandler()
+# loghandler.setformatter(logging.formatter('(=====)(levelname)s:%(message)s'))
+# rootlogger.addhandler(loghandler)
+
+logger = logging.getLogger('fanficfare')
+
+try:
+ # running under calibre
+ from calibre_plugins.fanficfare_plugin.fanficfare import adapters, writers, exceptions
+ from calibre_plugins.fanficfare_plugin.fanficfare.configurable import Configuration
+ from calibre_plugins.fanficfare_plugin.fanficfare.epubutils import (
+ get_dcsource_chaptercount, get_update_data, reset_orig_chapters_epub)
+ from calibre_plugins.fanficfare_plugin.fanficfare.geturls import get_urls_from_page, get_urls_from_imap
+except ImportError:
+ from fanficfare import adapters, writers, exceptions
+ from fanficfare.configurable import Configuration
+ from fanficfare.epubutils import (
+ get_dcsource_chaptercount, get_update_data, reset_orig_chapters_epub)
+ from fanficfare.geturls import get_urls_from_page, get_urls_from_imap
+
+
+def write_story(config, adapter, writeformat, metaonly=False, outstream=None):
+ writer = writers.getWriter(writeformat, config, adapter)
+ writer.writeStory(outstream=outstream, metaonly=metaonly)
+ output_filename = writer.getOutputFileName()
+ del writer
+ return output_filename
+
+def main(argv=None,
+ parser=None,
+ passed_defaultsini=None,
+ passed_personalini=None):
+ if argv is None:
+ argv = sys.argv[1:]
+ # read in args, anything starting with -- will be treated as --=
+ if not parser:
+ parser = OptionParser('usage: %prog [options] [storyurl]...')
+ parser.add_option('-f', '--format', dest='format', default='epub',
+ help='write story as format, epub(default), mobi, txt or html', metavar='format')
+ if passed_defaultsini:
+ config_help = 'read config from specified file(s) in addition to calibre plugin personal.ini, ~/.fanficfare/personal.ini, and ./personal.ini'
+ else:
+ config_help = 'read config from specified file(s) in addition to ~/.fanficfare/defaults.ini, ~/.fanficfare/personal.ini, ./defaults.ini, and ./personal.ini'
+ parser.add_option('-c', '--config',
+ action='append', dest='configfile', default=None,
+ help=config_help, metavar='config')
+ range_help = ' --begin and --end will be overridden by a chapter range on the storyurl like storyurl[1-2], storyurl[-3], storyurl[3-] or storyurl[3]'
+ parser.add_option('-b', '--begin', dest='begin', default=None,
+ help='begin with chapter start.'+range_help, metavar='start')
+ parser.add_option('-e', '--end', dest='end', default=None,
+ help='end with chapter end.'+range_help, metavar='end')
+ parser.add_option('-o', '--option',
+ action='append', dest='options',
+ help='set an option name=value', metavar='name=value')
+ parser.add_option('-m', '--meta-only',
+ action='store_true', dest='metaonly',
+ help='retrieve metadata and stop. or, if --update-epub, update metadata title page only.', )
+ parser.add_option('--json-meta',
+ action='store_true', dest='jsonmeta',
+ help='when used with --meta-only, output metadata as json. no effect without --meta-only flag', )
+ parser.add_option('-u', '--update-epub',
+ action='store_true', dest='update',
+ help='update an existing epub(if present) with new chapters. give either epub filename or story url.', )
+ parser.add_option('--update-cover',
+ action='store_true', dest='updatecover',
+ help='update cover in an existing epub, otherwise existing cover (if any) is used on update. only valid with --update-epub.', )
+ parser.add_option('--unnew',
+ action='store_true', dest='unnew',
+ help='remove (new) chapter marks left by mark_new_chapters setting.', )
+ parser.add_option('--force',
+ action='store_true', dest='force',
+ help='force overwrite of an existing epub, download and overwrite all chapters.', )
+ parser.add_option('-i', '--infile',
+ help='give a filename to read for urls (and/or existing epub files with --update-epub).',
+ dest='infile', default=None,
+ metavar='infile')
+
+ parser.add_option('-l', '--list',
+ dest='list', default=None, metavar='url',
+ help='get list of valid story urls from page given.', )
+ parser.add_option('-n', '--normalize-list',
+ dest='normalize', default=None, metavar='url',
+ help='get list of valid story urls from page given, but normalized to standard forms.', )
+ parser.add_option('--download-list',
+ dest='downloadlist', default=None, metavar='url',
+ help='download story urls retrieved from page given. update existing epubs if used with --update-epub.', )
+
+ parser.add_option('--imap',
+ action='store_true', dest='imaplist',
+ help='get list of valid story urls from unread email from imap account configured in ini.', )
+
+ parser.add_option('--download-imap',
+ action='store_true', dest='downloadimap',
+ help='download valid story urls from unread email from imap account configured in ini. update existing epubs if used with --update-epub.', )
+
+ parser.add_option('-s', '--sites-list',
+ action='store_true', dest='siteslist', default=False,
+ help='get list of valid story urls examples.', )
+ parser.add_option('--non-interactive',
+ action='store_false', dest='interactive', default=sys.stdin.isatty() and sys.stdout.isatty(),
+ help='prevent interactive prompts (for scripting).', )
+ parser.add_option('-d', '--debug',
+ action='store_true', dest='debug',
+ help='show debug and notice output.', )
+ parser.add_option('-p', '--progressbar',
+ action='store_true', dest='progressbar',
+ help='display a simple progress bar while downloading--one dot(.) per network fetch.', )
+ parser.add_option('-v', '--version',
+ action='store_true', dest='version',
+ help='display version and quit.', )
+
+ ## undocumented feature for development use. save page cache and
+ ## cookies between runs. saves in pwd as files global_cache and
+ ## global_cookies
+ parser.add_option('--save-cache', '--save_cache',
+ action='store_true', dest='save_cache',
+ help=SUPPRESS_HELP, )
+
+ options, args = parser.parse_args(argv)
+
+ if options.version:
+ print("version: %s" % version)
+ return
+
+ if not options.debug:
+ logger.setLevel(logging.WARNING)
+
+ list_only = any((options.imaplist,
+ options.siteslist,
+ options.list,
+ options.normalize,
+ ))
+
+ if list_only and (args or any((options.downloadimap,
+ options.downloadlist))):
+ parser.error('incorrect arguments: cannot download and list urls at the same time.')
+
+ if options.siteslist:
+ for site, examples in adapters.getSiteExamples():
+ print('\n#### %s\nexample urls:' % site)
+ for u in examples:
+ print(' * %s' % u)
+ return
+
+ if options.update and options.format != 'epub':
+ parser.error('-u/--update-epub only works with epub')
+
+ if options.unnew and options.format != 'epub':
+ parser.error('--unnew only works with epub')
+
+ urls=args
+
+ if not list_only and not (args or any((options.infile,
+ options.downloadimap,
+ options.downloadlist))):
+ parser.print_help();
+ return
+
+ if options.list:
+ configuration = get_configuration(options.list,
+ passed_defaultsini,
+ passed_personalini,options)
+ retlist = get_urls_from_page(options.list, configuration)
+ print('\n'.join(retlist))
+
+ if options.normalize:
+ configuration = get_configuration(options.normalize,
+ passed_defaultsini,
+ passed_personalini,options)
+ retlist = get_urls_from_page(options.normalize, configuration,normalize=True)
+ print('\n'.join(retlist))
+
+ if options.downloadlist:
+ configuration = get_configuration(options.downloadlist,
+ passed_defaultsini,
+ passed_personalini,options)
+ retlist = get_urls_from_page(options.downloadlist, configuration)
+ urls.extend(retlist)
+
+ if options.imaplist or options.downloadimap:
+ # list doesn't have a supported site.
+ configuration = get_configuration('test1.com',passed_defaultsini,passed_personalini,options)
+ markread = configuration.getConfig('imap_mark_read') == 'true' or \
+ (configuration.getConfig('imap_mark_read') == 'downloadonly' and options.downloadimap)
+ retlist = get_urls_from_imap(configuration.getConfig('imap_server'),
+ configuration.getConfig('imap_username'),
+ configuration.getConfig('imap_password'),
+ configuration.getConfig('imap_folder'),
+ markread)
+
+ if options.downloadimap:
+ urls.extend(retlist)
+ else:
+ print('\n'.join(retlist))
+
+ # for passing in a file list
+ if options.infile:
+ with open(options.infile,"r") as infile:
+ #print("file exists and is readable")
+ for url in infile:
+ if '#' in url:
+ url = url[:url.find('#')].strip()
+ url = url.strip()
+ if len(url) > 0:
+ #print("url: (%s)"%url)
+ urls.append(url)
+
+ if options.save_cache:
+ try:
+ with open('global_cache','rb') as jin:
+ options.pagecache = pickle.load(jin) # ,encoding="utf-8"
+ options.cookiejar = cl.LWPCookieJar()
+ options.cookiejar.load('global_cookies')
+ except:
+ print("didn't load global_cache")
+
+ if not list_only:
+ if len(urls) < 1:
+ print("no valid story urls found")
+ else:
+ for url in urls:
+ try:
+ do_download(url,
+ options,
+ passed_defaultsini,
+ passed_personalini)
+ #print("pagecache:%s"%options.pagecache.keys())
+ except Exception as e:
+ if len(urls) == 1:
+ raise
+ print("url(%s) failed: exception (%s). run url individually for more detail."%(url,e))
+
+ if options.save_cache:
+ with open('global_cache','wb') as jout:
+ pickle.dump(options.pagecache,jout)
+ options.cookiejar.save('global_cookies')
+
+# make rest a function and loop on it.
+def do_download(arg,
+ options,
+ passed_defaultsini,
+ passed_personalini):
+
+ # attempt to update an existing epub.
+ chaptercount = None
+ output_filename = None
+
+ if options.unnew:
+ # remove mark_new_chapters marks
+ reset_orig_chapters_epub(arg,arg)
+ return
+
+ if options.update:
+ try:
+ url, chaptercount = get_dcsource_chaptercount(arg)
+ if not url:
+ print('no story url found in epub to update.')
+ return
+ print('updating %s, url: %s' % (arg, url))
+ output_filename = arg
+ except Exception:
+ # if there's an error reading the update file, maybe it's a url?
+ # we'll look for an existing outputfile down below.
+ url = arg
+ else:
+ url = arg
+
+ configuration = get_configuration(url,
+ passed_defaultsini,
+ passed_personalini,
+ options,
+ chaptercount,
+ output_filename)
+
+ try:
+ # allow chapter range with url.
+ # like test1.com?sid=5[4-6] or [4,6]
+ # overrides cli options if present.
+ url,ch_begin,ch_end = adapters.get_url_chapter_range(url)
+
+ adapter = adapters.getAdapter(configuration, url)
+
+ ## share pagecache and cookiejar between multiple downloads.
+ if not hasattr(options,'pagecache'):
+ options.pagecache = configuration.get_empty_pagecache()
+ if not hasattr(options,'cookiejar'):
+ options.cookiejar = configuration.get_empty_cookiejar()
+ configuration.set_pagecache(options.pagecache)
+ configuration.set_cookiejar(options.cookiejar)
+
+ # url[begin-end] overrides cli option if present.
+ if ch_begin or ch_end:
+ adapter.setChaptersRange(ch_begin, ch_end)
+ else:
+ adapter.setChaptersRange(options.begin, options.end)
+
+ # check for updating from url (vs from file)
+ if options.update and not chaptercount:
+ try:
+ writer = writers.getWriter('epub', configuration, adapter)
+ output_filename = writer.getOutputFileName()
+ noturl, chaptercount = get_dcsource_chaptercount(output_filename)
+ print('updating %s, url: %s' % (output_filename, url))
+ except Exception:
+ options.update = False
+ pass
+
+ # check for include_images without no_image_processing. in absence of pil, give warning.
+ if adapter.getConfig('include_images') and not adapter.getConfig('no_image_processing'):
+ try:
+ from calibre.utils.magick import Image
+ except ImportError:
+ try:
+ ## pillow is a more current fork of pil library
+ from PIL import Image
+ except ImportError:
+ try:
+ import Image
+ except ImportError:
+ print("you have include_images enabled, but python image library(pil) isn't found.\nimages will be included full size in original format.\ncontinue? (y/n)?")
+ if options.interactive:
+ if not sys.stdin.readline().strip().lower().startswith('y'):
+ return
+ else:
+ # for non-interactive, default the response to yes and continue processing
+ print('y')
+
+ # three tries, that's enough if both user/pass & is_adult needed,
+ # or a couple tries of one or the other
+ for x in range(0, 2):
+ try:
+ adapter.getStoryMetadataOnly()
+ except exceptions.FailedToLogin as f:
+ if not options.interactive:
+ print('login failed on non-interactive process. set username and password in personal.ini.')
+ return
+ if f.passwdonly:
+ print('story requires a password.')
+ else:
+ print('login failed, need username/password.')
+ sys.stdout.write('username: ')
+ adapter.username = sys.stdin.readline().strip()
+ adapter.password = getpass.getpass(prompt='password: ')
+ # print('login: `%s`, password: `%s`' % (adapter.username, adapter.password))
+ except exceptions.AdultCheckRequired:
+ if options.interactive:
+ print('please confirm you are an adult in your locale: (y/n)?')
+ if sys.stdin.readline().strip().lower().startswith('y'):
+ adapter.is_adult = True
+ else:
+ print('adult check required on non-interactive process. set is_adult:true in personal.ini or pass -o "is_adult=true" to the command.')
+ return
+
+ if options.update and not options.force:
+ urlchaptercount = int(adapter.getStoryMetadataOnly().getMetadata('numChapters').replace(',',''))
+ # returns int adjusted for start-end range.
+ urlchaptercount = adapter.getStoryMetadataOnly().getChapterCount()
+
+ if chaptercount == urlchaptercount and not options.metaonly:
+ print('%s already contains %d chapters.' % (output_filename, chaptercount))
+ elif chaptercount > urlchaptercount:
+ print('%s contains %d chapters, more than source: %d.' % (output_filename, chaptercount, urlchaptercount))
+ elif chaptercount == 0:
+ print("%s doesn't contain any recognizable chapters, probably from a different source. not updating." % output_filename)
+ else:
+ # update now handled by pre-populating the old
+ # images and chapters in the adapter rather than
+ # merging epubs.
+ (url,
+ chaptercount,
+ adapter.oldchapters,
+ adapter.oldimgs,
+ adapter.oldcover,
+ adapter.calibrebookmark,
+ adapter.logfile,
+ adapter.oldchaptersmap,
+ adapter.oldchaptersdata) = (get_update_data(output_filename))[0:9]
+
+ print('do update - epub(%d) vs url(%d)' % (chaptercount, urlchaptercount))
+
+ if not options.update and chaptercount == urlchaptercount and adapter.getConfig('do_update_hook'):
+ adapter.hookForUpdates(chaptercount)
+
+ if adapter.getConfig('pre_process_safepattern'):
+ metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getConfig('pre_process_safepattern'))
+ else:
+ metadata = adapter.story.getAllMetadata()
+ call(string.Template(adapter.getConfig('pre_process_cmd')).substitute(metadata), shell=True)
+
+ write_story(configuration, adapter, 'epub')
+
+ else:
+ # regular download
+ if options.metaonly:
+ metadata = adapter.getStoryMetadataOnly().getAllMetadata()
+ metadata['zchapters'] = []
+ for i, chap in enumerate(adapter.get_chapters()):
+ metadata['zchapters'].append((i+1,chap))
+
+ if not options.metaonly and adapter.getConfig('pre_process_cmd'):
+ if adapter.getConfig('pre_process_safepattern'):
+ metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getConfig('pre_process_safepattern'))
+ else:
+ metadata = adapter.story.getAllMetadata()
+ call(string.Template(adapter.getConfig('pre_process_cmd')).substitute(metadata), shell=True)
+
+ output_filename = write_story(configuration, adapter, options.format, options.metaonly)
+
+ if options.metaonly:
+ metadata['output_filename'] = output_filename
+ if options.jsonmeta:
+ import json
+ print(json.dumps(metadata, sort_keys=True,
+ indent=2, separators=(',', ':')))
+ else:
+ pprint.pprint(metadata)
+
+ if not options.metaonly and adapter.getConfig('post_process_cmd'):
+ if adapter.getConfig('post_process_safepattern'):
+ metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getConfig('post_process_safepattern'))
+ else:
+ metadata = adapter.story.getAllMetadata()
+ metadata['output_filename'] = output_filename
+ call(string.Template(adapter.getConfig('post_process_cmd')).substitute(metadata), shell=True)
+
+ del adapter
+
+ except exceptions.InvalidStoryURL as isu:
+ print(isu)
+ except exceptions.StoryDoesNotExist as dne:
+ print(dne)
+ except exceptions.UnknownSite as us:
+ print(us)
+ except exceptions.AccessDenied as ad:
+ print(ad)
+
+def get_configuration(url,
+ passed_defaultsini,
+ passed_personalini,
+ options,
+ chaptercount=None,
+ output_filename=None):
+ try:
+ configuration = Configuration(adapters.getConfigSectionsFor(url), options.format)
+ except exceptions.UnknownSite as e:
+ if options.list or options.normalize or options.downloadlist:
+ # list for page doesn't have to be a supported site.
+ configuration = Configuration(['unknown'], options.format)
+ else:
+ raise e
+
+ conflist = []
+ homepath = join(expanduser('~'), '.fanficdownloader')
+ ## also look for .fanficfare now, give higher priority than old dir.
+ homepath2 = join(expanduser('~'), '.fanficfare')
+
+ if passed_defaultsini:
+ # new stringio each time rather than pass stringio and rewind
+ # for case of list download. just makes more sense to me.
+ configuration.readfp(StringIO(passed_defaultsini))
+ else:
+ # don't need to check existance for our selves.
+ conflist.append(join(dirname(__file__), 'defaults.ini'))
+ conflist.append(join(homepath, 'defaults.ini'))
+ conflist.append(join(homepath2, 'defaults.ini'))
+ conflist.append('defaults.ini')
+
+ if passed_personalini:
+ # new stringio each time rather than pass stringio and rewind
+ # for case of list download. just makes more sense to me.
+ configuration.readfp(StringIO(passed_personalini))
+
+ conflist.append(join(homepath, 'personal.ini'))
+ conflist.append(join(homepath2, 'personal.ini'))
+ conflist.append('personal.ini')
+
+ if options.configfile:
+ conflist.extend(options.configfile)
+
+ configuration.read(conflist)
+
+ try:
+ configuration.add_section('overrides')
+ except configparser.DuplicateSectionError:
+ pass
+
+ if options.force:
+ configuration.set('overrides', 'always_overwrite', 'true')
+
+ if options.update and chaptercount and output_filename:
+ configuration.set('overrides', 'output_filename', output_filename)
+
+ if options.update and not options.updatecover:
+ configuration.set('overrides', 'never_make_cover', 'true')
+
+ # images only for epub, even if the user mistakenly turned it
+ # on else where.
+ if options.format not in ('epub', 'html'):
+ configuration.set('overrides', 'include_images', 'false')
+
+ if options.options:
+ for opt in options.options:
+ (var, val) = opt.split('=')
+ configuration.set('overrides', var, val)
+
+ if options.progressbar:
+ configuration.set('overrides','progressbar','true')
+
+ return configuration
+
+if __name__ == '__main__':
+ main()
diff --git a/fanficfare/configurable.py b/fanficfare/configurable.py
index 9f601250..fa3408ee 100644
--- a/fanficfare/configurable.py
+++ b/fanficfare/configurable.py
@@ -15,18 +15,20 @@
# limitations under the License.
#
-import ConfigParser, re
+import re
import exceptions
import codecs
-from ConfigParser import DEFAULTSECT, MissingSectionHeaderError, ParsingError
+
+import six.moves.configparser as ConfigParser
+from six.moves.configparser import DEFAULTSECT, MissingSectionHeaderError, ParsingError
+from six.moves import urllib
+from six.moves import urllib as u2
+from six.moves.urllib.parse import urlparse as up
+from six.moves import http_cookiejar as cl
import time
import logging
import sys
-import urllib
-import urllib2 as u2
-import urlparse as up
-import cookielib as cl
import pickle
try:
@@ -74,7 +76,7 @@ import adapters
def re_compile(regex,line):
try:
return re.compile(regex,re.DOTALL)
- except Exception, e:
+ except Exception as e:
raise exceptions.RegularExpresssionFailed(e,regex,line)
# fall back labels.
@@ -477,7 +479,7 @@ def make_generate_cover_settings(param):
(template,regexp,setting) = map( lambda x: x.strip(), line.split("=>") )
re_compile(regexp,line)
vlist.append((template,regexp,setting))
- except Exception, e:
+ except Exception as e:
raise exceptions.PersonalIniFailed(e,line,param)
return vlist
@@ -636,17 +638,17 @@ class Configuration(ConfigParser.SafeConfigParser):
val = self.get(section,key)
if val and val.lower() == "false":
val = False
- #print "getConfig(%s)=[%s]%s" % (key,section,val)
+ #print("getConfig(%s)=[%s]%s" % (key,section,val))
break
- except (ConfigParser.NoOptionError, ConfigParser.NoSectionError), e:
+ except (ConfigParser.NoOptionError, ConfigParser.NoSectionError) as e:
pass
for section in sections[::-1]:
# 'martian smiley' [::-1] reverses list by slicing whole list with -1 step.
try:
val = val + self.get(section,"add_to_"+key)
- #print "getConfig(add_to_%s)=[%s]%s" % (key,section,val)
- except (ConfigParser.NoOptionError, ConfigParser.NoSectionError), e:
+ #print("getConfig(add_to_%s)=[%s]%s" % (key,section,val))
+ except (ConfigParser.NoOptionError, ConfigParser.NoSectionError) as e:
pass
return val
@@ -655,7 +657,7 @@ class Configuration(ConfigParser.SafeConfigParser):
def get_config_list(self, sections, key, default=[]):
vlist = re.split(r'(? float(self.getConfig("chardet_confidence_limit",0.9)):
logger.debug("using chardet detected encoding:%s(%s)"%(detected['encoding'],detected['confidence']))
code=detected['encoding']
@@ -1115,12 +1117,12 @@ class Configuration(ConfigParser.SafeConfigParser):
extrasleep=extrasleep,
referer=referer)
return (self._decode(data),opened)
- except u2.HTTPError, he:
+ except u2.HTTPError as he:
excpt=he
if he.code in (403,404,410):
logger.debug("Caught an exception reading URL: %s Exception %s."%(unicode(safe_url(url)),unicode(he)))
break # break out on 404
- except Exception, e:
+ except Exception as e:
excpt=e
logger.debug("Caught an exception reading URL: %s sleeptime(%s) Exception %s."%(unicode(safe_url(url)),sleeptime,unicode(e)))
diff --git a/fanficfare/gziphttp.py b/fanficfare/gziphttp.py
index 92ebb641..45974b42 100644
--- a/fanficfare/gziphttp.py
+++ b/fanficfare/gziphttp.py
@@ -1,10 +1,10 @@
## Borrowed from http://techknack.net/python-urllib2-handlers/
-import urllib2
+from six.moves.urllib_request import BaseHandler
from gzip import GzipFile
-from StringIO import StringIO
+from six import StringIO
-class GZipProcessor(urllib2.BaseHandler):
+class GZipProcessor(BaseHandler):
"""A handler to add gzip capabilities to urllib2 requests
"""
def http_request(self, req):
From 611e6cecf2be0e4c1ac956c4767d6ad71fd023ed Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 25 Jul 2018 17:51:48 -0500
Subject: [PATCH 002/120] Working towards python 2.7 & 3 cross compatibility.
---
fanficfare/adapters/__init__.py | 7 +++----
fanficfare/adapters/adapter_test1.py | 2 +-
fanficfare/adapters/base_adapter.py | 6 +++---
fanficfare/cli.py | 1 +
fanficfare/cli3.py | 1 +
5 files changed, 9 insertions(+), 8 deletions(-)
diff --git a/fanficfare/adapters/__init__.py b/fanficfare/adapters/__init__.py
index cae72b6f..9e63abce 100644
--- a/fanficfare/adapters/__init__.py
+++ b/fanficfare/adapters/__init__.py
@@ -19,12 +19,11 @@ from __future__ import absolute_import
import os, re, sys, glob, types
from os.path import dirname, basename, normpath
import logging
-from six.moves.urllib.parse import urlparse as up
+from six.moves.urllib.parse import urlparse
logger = logging.getLogger(__name__)
print(sys.path)
-from fanficfare.configurable import Configuration
from .. import exceptions as exceptions
@@ -226,7 +225,7 @@ def getNormalStoryURL(url):
def getNormalStoryURLSite(url):
# print("getNormalStoryURLSite:%s"%url)
if not getNormalStoryURL.__dummyconfig:
- getNormalStoryURL.__dummyconfig = Configuration(["test1.com"],"EPUB",lightweight=True)
+ getNormalStoryURL.__dummyconfig = configurable.Configuration(["test1.com"],"EPUB",lightweight=True)
# pulling up an adapter is pretty low over-head. If
# it fails, it's a bad url.
try:
@@ -300,7 +299,7 @@ def _get_class_for(url):
if not "#post-" in fixedurl:
fixedurl = re.sub(r"#.*$","",fixedurl)
- parsedUrl = up.urlparse(fixedurl)
+ parsedUrl = urlparse(fixedurl)
domain = parsedUrl.netloc.lower()
if( domain != parsedUrl.netloc ):
fixedurl = fixedurl.replace(parsedUrl.netloc,domain)
diff --git a/fanficfare/adapters/adapter_test1.py b/fanficfare/adapters/adapter_test1.py
index 745f2204..156764aa 100644
--- a/fanficfare/adapters/adapter_test1.py
+++ b/fanficfare/adapters/adapter_test1.py
@@ -22,7 +22,7 @@ logger = logging.getLogger(__name__)
from .. import exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class TestSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/base_adapter.py b/fanficfare/adapters/base_adapter.py
index 329eda94..e62800c7 100644
--- a/fanficfare/adapters/base_adapter.py
+++ b/fanficfare/adapters/base_adapter.py
@@ -20,7 +20,7 @@ from datetime import datetime, timedelta
from collections import defaultdict
import logging
-import urlparse as up
+from six.moves.urllib.parse import urlparse
from functools import partial
import traceback
import copy
@@ -125,7 +125,7 @@ class BaseSiteAdapter(Configurable):
def _setURL(self,url):
self.url = url
- self.parsedUrl = up.urlparse(url)
+ self.parsedUrl = urlparse(url)
self.host = self.parsedUrl.netloc
self.path = self.parsedUrl.path
self.story.setMetadata('storyUrl',self.url,condremoveentities=False)
@@ -503,7 +503,7 @@ class BaseSiteAdapter(Configurable):
if t.name=='script':
t.extract()
- except AttributeError, ae:
+ except AttributeError as ae:
if "%s"%ae != "'NoneType' object has no attribute 'next_element'":
logger.error("Error parsing HTML, probably poor input HTML. %s"%ae)
diff --git a/fanficfare/cli.py b/fanficfare/cli.py
index 4748b41f..6f9f5a7d 100644
--- a/fanficfare/cli.py
+++ b/fanficfare/cli.py
@@ -33,6 +33,7 @@ import cookielib as cl
version="2.27.12"
os.environ['CURRENT_VERSION_ID']=version
+print("Python Version:%s"%sys.version)
if sys.version_info < (2, 5) or sys.version_info > (3,0):
print('This program requires Python 2.5 or newer. Python 3 is not supported.')
sys.exit(1)
diff --git a/fanficfare/cli3.py b/fanficfare/cli3.py
index f7ea7dc1..6a8106f7 100644
--- a/fanficfare/cli3.py
+++ b/fanficfare/cli3.py
@@ -32,6 +32,7 @@ import http.cookiejar as cl
version="2.27.8"
+print("Python Version:%s"%sys.version)
if sys.version_info < (3,0):
print('this program requires python 3 or newer.')
sys.exit(1)
From ac3b288f3b66e6f85f575698791c2b62a226b23c Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Thu, 26 Jul 2018 12:29:01 -0500
Subject: [PATCH 003/120] Working towards python 2.7 & 3 cross compatibility.
---
fanficfare/adapters/base_adapter.py | 8 ++++----
fanficfare/htmlcleanup.py | 6 +++---
fanficfare/htmlheuristics.py | 2 +-
fanficfare/mobi.py | 2 +-
fanficfare/{html.py => mobihtml.py} | 6 ++++--
fanficfare/story.py | 9 ++++-----
6 files changed, 17 insertions(+), 16 deletions(-)
rename fanficfare/{html.py => mobihtml.py} (97%)
diff --git a/fanficfare/adapters/base_adapter.py b/fanficfare/adapters/base_adapter.py
index e62800c7..bacd7b82 100644
--- a/fanficfare/adapters/base_adapter.py
+++ b/fanficfare/adapters/base_adapter.py
@@ -25,7 +25,7 @@ from functools import partial
import traceback
import copy
-import bs4
+from bs4 import BeautifulSoup
from ..htmlcleanup import stripHTML
from ..htmlheuristics import replace_br_with_p
@@ -397,7 +397,7 @@ class BaseSiteAdapter(Configurable):
if isinstance(svalue,basestring):
# bs4/html5lib add html, header and body tags, which
# we don't want. utf8FromSoup will strip the body tags for us.
- svalue = bs4.BeautifulSoup(svalue,"html5lib").body
+ svalue = BeautifulSoup(svalue,"html5lib").body
self.story.setMetadata('description',self.utf8FromSoup(url,svalue))
else:
self.story.setMetadata('description',stripHTML(svalue))
@@ -546,8 +546,8 @@ class BaseSiteAdapter(Configurable):
## soup and re-soup because BS4/html5lib is more forgiving of
## incorrectly nested tags that way.
- soup = bs4.BeautifulSoup(data,'html5lib')
- soup = bs4.BeautifulSoup(unicode(soup),'html5lib')
+ soup = BeautifulSoup(data,'html5lib')
+ soup = BeautifulSoup(unicode(soup),'html5lib')
for ns in soup.find_all('fff_hide_noscript'):
ns.name = 'noscript'
diff --git a/fanficfare/htmlcleanup.py b/fanficfare/htmlcleanup.py
index 997cedcd..22acab8f 100644
--- a/fanficfare/htmlcleanup.py
+++ b/fanficfare/htmlcleanup.py
@@ -81,10 +81,10 @@ def removeEntities(text, space_only=False):
try:
t = text.decode('utf-8')
- except (UnicodeEncodeError,UnicodeDecodeError), e:
+ except (UnicodeEncodeError,UnicodeDecodeError) as e:
try:
t = text.encode ('ascii', 'xmlcharrefreplace')
- except (UnicodeEncodeError,UnicodeDecodeError), e:
+ except (UnicodeEncodeError,UnicodeDecodeError) as e:
t = text
text = t
# replace numeric versions of [&<>] with named versions,
@@ -106,7 +106,7 @@ def removeEntities(text, space_only=False):
continue
try:
text = text.replace(e, v)
- except UnicodeDecodeError, ex:
+ except UnicodeDecodeError as ex:
# for the pound symbol in constants.py
text = text.replace(e, v.decode('utf-8'))
diff --git a/fanficfare/htmlheuristics.py b/fanficfare/htmlheuristics.py
index 5f4a8ee4..7b520bd1 100644
--- a/fanficfare/htmlheuristics.py
+++ b/fanficfare/htmlheuristics.py
@@ -22,7 +22,7 @@ import codecs
import bs4 as bs
import HtmlTagStack as stack
-from . import exceptions as exceptions
+import exceptions
def logdebug(s):
# uncomment for debug output
diff --git a/fanficfare/mobi.py b/fanficfare/mobi.py
index 7a527154..2c1f1fa2 100644
--- a/fanficfare/mobi.py
+++ b/fanficfare/mobi.py
@@ -10,7 +10,7 @@ import logging
logger = logging.getLogger(__name__)
-from html import HtmlProcessor
+from mobihtml import HtmlProcessor
# http://wiki.mobileread.com/wiki/MOBI
# http://membres.lycos.fr/microfirst/palm/pdb.html
diff --git a/fanficfare/html.py b/fanficfare/mobihtml.py
similarity index 97%
rename from fanficfare/html.py
rename to fanficfare/mobihtml.py
index c2bf4891..7782a43c 100644
--- a/fanficfare/html.py
+++ b/fanficfare/mobihtml.py
@@ -5,9 +5,11 @@
import re
import sys
-import StringIO
-import urllib
+from six import StringIO
+from six.moves import urllib
+# import bs4
+# BeautifulSoup = bs4.BeautifulSoup
from bs4 import BeautifulSoup
class HtmlProcessor:
diff --git a/fanficfare/story.py b/fanficfare/story.py
index 95bd761e..70852df9 100644
--- a/fanficfare/story.py
+++ b/fanficfare/story.py
@@ -18,7 +18,7 @@
import os, re
import copy
from collections import defaultdict
-import urlparse
+from six.moves.urllib.parse import urlparse
import string
import json
import datetime
@@ -26,7 +26,6 @@ from math import floor
from functools import partial
import logging
logger = logging.getLogger(__name__)
-import urlparse as up
import bs4
@@ -147,7 +146,7 @@ except:
## also used for explicit no image processing.
def no_convert_image(url,data):
- parsedUrl = up.urlparse(url)
+ parsedUrl = urlparse.urlparse(url)
ext=parsedUrl.path[parsedUrl.path.rfind('.')+1:].lower()
@@ -1184,7 +1183,7 @@ class Story(Configurable):
else:
try:
sizes = [ int(x) for x in self.getConfigList('image_max_size') ]
- except Exception, e:
+ except Exception as e:
raise exceptions.FailedToDownload("Failed to parse image_max_size from personal.ini:%s\nException: %s"%(self.getConfigList('image_max_size'),e))
grayscale = self.getConfig('grayscale_images')
imgtype = self.getConfig('convert_images_to')
@@ -1201,7 +1200,7 @@ class Story(Configurable):
removetrans,
imgtype,
background="#"+self.getConfig('background_color'))
- except Exception, e:
+ except Exception as e:
logger.info("Failed to load or convert image, \nparent:%s\nskipping:%s\nException: %s"%(parenturl,imgurl,e))
return ("failedtoload","failedtoload")
From 1a2392a8c895f294e28e09d22a8d06bb7ff27fe0 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Thu, 26 Jul 2018 14:54:03 -0500
Subject: [PATCH 004/120] Working towards python 2.7 & 3 cross compatibility.
---
fanficfare/adapters/__init__.py | 4 +++-
fanficfare/configurable.py | 7 ++++++-
fanficfare/writers/__init__.py | 14 +++++++-------
fanficfare/writers/base_writer.py | 26 ++++++++++++++------------
fanficfare/writers/writer_epub.py | 4 ++--
fanficfare/writers/writer_html.py | 2 +-
fanficfare/writers/writer_mobi.py | 4 ++--
fanficfare/writers/writer_txt.py | 2 +-
8 files changed, 36 insertions(+), 27 deletions(-)
diff --git a/fanficfare/adapters/__init__.py b/fanficfare/adapters/__init__.py
index 9e63abce..cad90e3e 100644
--- a/fanficfare/adapters/__init__.py
+++ b/fanficfare/adapters/__init__.py
@@ -189,9 +189,11 @@ __class_list = []
__domain_map = {}
def imports():
+ out = []
for name, val in globals().items():
if isinstance(val, types.ModuleType):
- yield val.__name__
+ out.append(val.__name__)
+ return out
for x in imports():
if "fanficfare.adapters.adapter_" in x:
diff --git a/fanficfare/configurable.py b/fanficfare/configurable.py
index fa3408ee..9dd7e69c 100644
--- a/fanficfare/configurable.py
+++ b/fanficfare/configurable.py
@@ -71,7 +71,12 @@ from gziphttp import GZipProcessor
logger = logging.getLogger(__name__)
-import adapters
+# It's all fault of David Beazley!
+try:
+ from . import adapters
+except ImportError:
+ import sys
+ adapters = sys.modules["fanficfare.adapters"]
def re_compile(regex,line):
try:
diff --git a/fanficfare/writers/__init__.py b/fanficfare/writers/__init__.py
index c0e3863c..0d378170 100644
--- a/fanficfare/writers/__init__.py
+++ b/fanficfare/writers/__init__.py
@@ -14,16 +14,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
-## This could (should?) use a dynamic loader like adapters, but for
-## now, it's static, since there's so few of them.
+from __future__ import absolute_import
from ..exceptions import FailedToDownload
-from writer_html import HTMLWriter
-from writer_txt import TextWriter
-from writer_epub import EpubWriter
-from writer_mobi import MobiWriter
+## This could (should?) use a dynamic loader like adapters, but for
+## now, it's static, since there's so few of them.
+from .writer_html import HTMLWriter
+from .writer_txt import TextWriter
+from .writer_epub import EpubWriter
+from .writer_mobi import MobiWriter
def getWriter(type,config,story):
if type == "html":
diff --git a/fanficfare/writers/base_writer.py b/fanficfare/writers/base_writer.py
index bc3aeb34..958253de 100644
--- a/fanficfare/writers/base_writer.py
+++ b/fanficfare/writers/base_writer.py
@@ -14,12 +14,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+from __future__ import absolute_import
import re
import os.path
import datetime
import string
-import StringIO
+from six import StringIO
import zipfile
from zipfile import ZipFile, ZIP_DEFLATED
import logging
@@ -41,10 +42,10 @@ class BaseStoryWriter(Configurable):
def __init__(self, configuration, adapter):
Configurable.__init__(self, configuration)
-
+
self.adapter = adapter
self.story = adapter.getStoryMetadataOnly() # only cache the metadata initially.
-
+
self.story.setMetadata('formatname',self.getFormatName())
self.story.setMetadata('formatext',self.getFormatExt())
@@ -59,12 +60,13 @@ class BaseStoryWriter(Configurable):
def getBaseFileName(self):
return self.story.formatFileName(self.getConfig('output_filename'),self.getConfig('allow_unsafe_filename'))
-
+
def getZipFileName(self):
return self.story.formatFileName(self.getConfig('zip_filename'),self.getConfig('allow_unsafe_filename'))
def _write(self, out, text):
- out.write(text.encode('utf8'))
+ # instead of text.encode('utf8')
+ out.write(six.ensure_text(text))
def writeTitlePage(self, out, START, ENTRY, END, WIDE_ENTRY=None, NO_TITLE_ENTRY=None):
"""
@@ -89,7 +91,7 @@ class BaseStoryWriter(Configurable):
if self.hasConfig("titlepage_no_title_entry"):
NO_TITLE_ENTRY = string.Template(self.getConfig("titlepage_no_title_entry"))
-
+
self._write(out,START.substitute(self.story.getAllMetadata()))
if WIDE_ENTRY==None:
@@ -120,7 +122,7 @@ class BaseStoryWriter(Configurable):
# 'no title' option if there is one.
if label == "" and NO_TITLE_ENTRY:
TEMPLATE= NO_TITLE_ENTRY
-
+
self._write(out,TEMPLATE.substitute({'label':label,
'id':entry,
'value':self.story.getMetadata(entry)}))
@@ -145,7 +147,7 @@ class BaseStoryWriter(Configurable):
if self.hasConfig("tocpage_end"):
END = string.Template(self.getConfig("tocpage_end"))
-
+
self._write(out,START.substitute(self.story.getAllMetadata()))
for index, chap in enumerate(self.story.getChapters(fortoc=True)):
@@ -170,7 +172,7 @@ class BaseStoryWriter(Configurable):
condremoveentities=False)
else:
self.story.setMetadata("output_css",'')
-
+
if not outstream:
close=True
logger.info("Save directly to file: %s" % outfilename)
@@ -191,7 +193,7 @@ class BaseStoryWriter(Configurable):
if fileupdated > lastupdated:
logger.warn("File(%s) Updated(%s) more recently than Story(%s) - Skipping" % (outfilename,fileupdated,lastupdated))
return
- if not metaonly:
+ if not metaonly:
self.story = self.adapter.getStory() # get full story
# now, just
# before writing.
@@ -228,7 +230,7 @@ class BaseStoryWriter(Configurable):
def writeFile(self, filename, data):
logger.debug("writeFile:%s"%filename)
-
+
if self.getConfig('zip_output'):
outputdirs = os.path.dirname(self.getBaseFileName())
if outputdirs:
@@ -242,7 +244,7 @@ class BaseStoryWriter(Configurable):
dir = os.path.dirname(filename)
if not os.path.exists(dir):
os.mkdir(dir) ## os.makedirs() doesn't work in 2.5.2?
-
+
outstream = open(filename,"wb")
outstream.write(data)
outstream.close()
diff --git a/fanficfare/writers/writer_epub.py b/fanficfare/writers/writer_epub.py
index 049126d0..2a2a34a1 100644
--- a/fanficfare/writers/writer_epub.py
+++ b/fanficfare/writers/writer_epub.py
@@ -17,7 +17,7 @@
import logging
import string
-import StringIO
+from six import StringIO
import zipfile
from zipfile import ZipFile, ZIP_STORED, ZIP_DEFLATED
import urllib
@@ -29,7 +29,7 @@ from xml.dom.minidom import parse, parseString, getDOMImplementation
import bs4
-from base_writer import *
+from .base_writer import *
from ..htmlcleanup import stripHTML,removeEntities
from ..story import commaGroups
diff --git a/fanficfare/writers/writer_html.py b/fanficfare/writers/writer_html.py
index cd727b1c..09664b82 100644
--- a/fanficfare/writers/writer_html.py
+++ b/fanficfare/writers/writer_html.py
@@ -20,7 +20,7 @@ import string
import bs4
-from base_writer import *
+from .base_writer import *
class HTMLWriter(BaseStoryWriter):
diff --git a/fanficfare/writers/writer_mobi.py b/fanficfare/writers/writer_mobi.py
index f37ee2fe..97fdfcbc 100644
--- a/fanficfare/writers/writer_mobi.py
+++ b/fanficfare/writers/writer_mobi.py
@@ -17,9 +17,9 @@
import logging
import string
-import StringIO
+from six import StringIO
-from base_writer import *
+from .base_writer import *
from ..htmlcleanup import stripHTML
from ..mobi import Converter
from ..exceptions import FailedToWriteOutput
diff --git a/fanficfare/writers/writer_txt.py b/fanficfare/writers/writer_txt.py
index b5c10647..7199311e 100644
--- a/fanficfare/writers/writer_txt.py
+++ b/fanficfare/writers/writer_txt.py
@@ -19,7 +19,7 @@ import logging
import string
from textwrap import wrap
-from base_writer import *
+from .base_writer import *
from html2text import html2text
From cea3773e4f9a92e3f93d3d46746eb520fc3a1570 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Thu, 26 Jul 2018 15:19:21 -0500
Subject: [PATCH 005/120] Working towards python 2.7 & 3 cross compatibility.
---
fanficfare/configurable.py | 25 ++++++++++++++-----------
fanficfare/writers/base_writer.py | 5 +++--
fanficfare/writers/writer_epub.py | 10 +++++-----
3 files changed, 22 insertions(+), 18 deletions(-)
diff --git a/fanficfare/configurable.py b/fanficfare/configurable.py
index 9dd7e69c..9cb62271 100644
--- a/fanficfare/configurable.py
+++ b/fanficfare/configurable.py
@@ -22,9 +22,11 @@ import codecs
import six.moves.configparser as ConfigParser
from six.moves.configparser import DEFAULTSECT, MissingSectionHeaderError, ParsingError
from six.moves import urllib
-from six.moves import urllib as u2
-from six.moves.urllib.parse import urlparse as up
+from six.moves.urllib.request import (build_opener, HTTPCookieProcessor)
+from six.moves.urllib import parse as up
from six.moves import http_cookiejar as cl
+# unicode in py2, str in py3
+from six import text_type as unicode
import time
import logging
@@ -71,7 +73,8 @@ from gziphttp import GZipProcessor
logger = logging.getLogger(__name__)
-# It's all fault of David Beazley!
+# Work around for fact that py3 apparently doesn't allow/ignore
+# recursive imports like py2 does.
try:
from . import adapters
except ImportError:
@@ -481,7 +484,7 @@ def make_generate_cover_settings(param):
for line in param.splitlines():
if "=>" in line:
try:
- (template,regexp,setting) = map( lambda x: x.strip(), line.split("=>") )
+ (template,regexp,setting) = [ x.strip() for x in line.split("=>") ]
re_compile(regexp,line)
vlist.append((template,regexp,setting))
except Exception as e:
@@ -537,7 +540,7 @@ class Configuration(ConfigParser.SafeConfigParser):
self.override_sleep = None
self.cookiejar = self.get_empty_cookiejar()
- self.opener = u2.build_opener(u2.HTTPCookieProcessor(self.cookiejar),GZipProcessor())
+ self.opener = build_opener(HTTPCookieProcessor(self.cookiejar),GZipProcessor())
self.pagecache = self.get_empty_pagecache()
@@ -550,9 +553,9 @@ class Configuration(ConfigParser.SafeConfigParser):
## reconstructed completely because removing and re-adding
## a section would mess up the order.
## assumes _dict and _sections from ConfigParser parent.
- self._sections = self._dict((section_url_f(k) if (domain in k and 'http' in k) else k, v) for k, v in self._sections.viewitems())
+ self._sections = self._dict((section_url_f(k) if (domain in k and 'http' in k) else k, v) for k, v in six.viewitems(self._sections))
# logger.debug(self._sections.keys())
- except e:
+ except Exception as e:
logger.warn("Failed to perform section_url_names: %s"%e)
def addUrlConfigSection(self,url):
@@ -896,7 +899,7 @@ class Configuration(ConfigParser.SafeConfigParser):
def set_cookiejar(self,cj):
self.cookiejar = cj
saveheaders = self.opener.addheaders
- self.opener = u2.build_opener(u2.HTTPCookieProcessor(self.cookiejar),GZipProcessor())
+ self.opener = build_opener(HTTPCookieProcessor(self.cookiejar),GZipProcessor())
self.opener.addheaders = saveheaders
def load_cookiejar(self,filename):
@@ -1006,13 +1009,13 @@ class Configuration(ConfigParser.SafeConfigParser):
logger.debug("#####################################\npagecache(POST) MISS: %s"%safe_url(cachekey))
self.do_sleep(extrasleep)
- ## u2.Request assumes POST when data!=None. Also assumes data
+ ## urllib.Request assumes POST when data!=None. Also assumes data
## is application/x-www-form-urlencoded.
if 'Content-type' not in headers:
headers['Content-type']='application/x-www-form-urlencoded'
if 'Accept' not in headers:
headers['Accept']="text/html,*/*"
- req = u2.Request(url,
+ req = urllib.Request(url,
data=urllib.urlencode(parameters),
headers=headers)
@@ -1122,7 +1125,7 @@ class Configuration(ConfigParser.SafeConfigParser):
extrasleep=extrasleep,
referer=referer)
return (self._decode(data),opened)
- except u2.HTTPError as he:
+ except urllib.HTTPError as he:
excpt=he
if he.code in (403,404,410):
logger.debug("Caught an exception reading URL: %s Exception %s."%(unicode(safe_url(url)),unicode(he)))
diff --git a/fanficfare/writers/base_writer.py b/fanficfare/writers/base_writer.py
index 958253de..2ed564ab 100644
--- a/fanficfare/writers/base_writer.py
+++ b/fanficfare/writers/base_writer.py
@@ -20,6 +20,7 @@ import re
import os.path
import datetime
import string
+import six
from six import StringIO
import zipfile
from zipfile import ZipFile, ZIP_DEFLATED
@@ -65,8 +66,8 @@ class BaseStoryWriter(Configurable):
return self.story.formatFileName(self.getConfig('zip_filename'),self.getConfig('allow_unsafe_filename'))
def _write(self, out, text):
- # instead of text.encode('utf8')
- out.write(six.ensure_text(text))
+ # instead of six.ensure_text(text)
+ out.write(text.encode('utf8'))
def writeTitlePage(self, out, START, ENTRY, END, WIDE_ENTRY=None, NO_TITLE_ENTRY=None):
"""
diff --git a/fanficfare/writers/writer_epub.py b/fanficfare/writers/writer_epub.py
index 2a2a34a1..59f6204f 100644
--- a/fanficfare/writers/writer_epub.py
+++ b/fanficfare/writers/writer_epub.py
@@ -303,7 +303,7 @@ div { margin: 0pt; padding: 0pt; }
## not on an open stream. OTOH, I suspect we would have had
## problems with closing and opening again to change the
## compression type anyway.
- zipio = StringIO.StringIO()
+ zipio = StringIO()
## mimetype must be first file and uncompressed. Python 2.5
## ZipFile can't change compression type file-by-file, so we
@@ -518,7 +518,7 @@ div { margin: 0pt; padding: 0pt; }
COVER = string.Template(self.getConfig("cover_content"))
else:
COVER = self.EPUB_COVER
- coverIO = StringIO.StringIO()
+ coverIO = StringIO()
coverIO.write(COVER.substitute(dict(self.story.getAllMetadata().items()+{'coverimg':self.story.cover}.items())))
if self.getConfig("include_titlepage"):
@@ -655,7 +655,7 @@ div { margin: 0pt; padding: 0pt; }
outputepub.writestr("OEBPS/cover.xhtml",coverIO.getvalue())
coverIO.close()
- titlepageIO = StringIO.StringIO()
+ titlepageIO = StringIO()
self.writeTitlePage(out=titlepageIO,
START=TITLE_PAGE_START,
ENTRY=TITLE_ENTRY,
@@ -667,7 +667,7 @@ div { margin: 0pt; padding: 0pt; }
titlepageIO.close()
# write toc page.
- tocpageIO = StringIO.StringIO()
+ tocpageIO = StringIO()
self.writeTOCPage(tocpageIO,
self.EPUB_TOC_PAGE_START,
self.EPUB_TOC_ENTRY,
@@ -678,7 +678,7 @@ div { margin: 0pt; padding: 0pt; }
if dologpage:
# write log page.
- logpageIO = StringIO.StringIO()
+ logpageIO = StringIO()
self.writeLogPage(logpageIO)
outputepub.writestr("OEBPS/log_page.xhtml",logpageIO.getvalue())
logpageIO.close()
From 957ff3edf463010086ee9505cdd512be10dac015 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Thu, 26 Jul 2018 16:00:21 -0500
Subject: [PATCH 006/120] Working towards python 2.7 & 3 cross compatibility.
---
fanficfare/cli3.py | 286 +++++++++++++++---------------
fanficfare/configurable.py | 16 +-
fanficfare/epubutils.py | 2 +-
fanficfare/geturls.py | 18 +-
fanficfare/mobi.py | 4 +-
fanficfare/writers/writer_epub.py | 2 +-
6 files changed, 168 insertions(+), 160 deletions(-)
diff --git a/fanficfare/cli3.py b/fanficfare/cli3.py
index 6a8106f7..99b615f9 100644
--- a/fanficfare/cli3.py
+++ b/fanficfare/cli3.py
@@ -20,7 +20,7 @@ from os.path import expanduser, join, dirname
from os import access, R_OK
from subprocess import call
from six import StringIO
-import six.moves.configparser
+from six.moves import configparser
import getpass
import logging
import pprint
@@ -49,123 +49,123 @@ logger = logging.getLogger('fanficfare')
try:
# running under calibre
from calibre_plugins.fanficfare_plugin.fanficfare import adapters, writers, exceptions
- from calibre_plugins.fanficfare_plugin.fanficfare.configurable import configuration
+ from calibre_plugins.fanficfare_plugin.fanficfare.configurable import Configuration
from calibre_plugins.fanficfare_plugin.fanficfare.epubutils import (
get_dcsource_chaptercount, get_update_data, reset_orig_chapters_epub)
from calibre_plugins.fanficfare_plugin.fanficfare.geturls import get_urls_from_page, get_urls_from_imap
except ImportError:
from fanficfare import adapters, writers, exceptions
- from fanficfare.configurable import configuration
+ from fanficfare.configurable import Configuration
from fanficfare.epubutils import (
get_dcsource_chaptercount, get_update_data, reset_orig_chapters_epub)
from fanficfare.geturls import get_urls_from_page, get_urls_from_imap
-def write_story(config, adapter, writeformat, metaonly=false, outstream=none):
- writer = writers.getwriter(writeformat, config, adapter)
- writer.writestory(outstream=outstream, metaonly=metaonly)
- output_filename = writer.getoutputfilename()
+def write_story(config, adapter, writeformat, metaonly=False, outstream=None):
+ writer = writers.getWriter(writeformat, config, adapter)
+ writer.writeStory(outstream=outstream, metaonly=metaonly)
+ output_filename = writer.getOutputFileName()
del writer
return output_filename
-def main(argv=none,
- parser=none,
- passed_defaultsini=none,
- passed_personalini=none):
- if argv is none:
+def main(argv=None,
+ parser=None,
+ passed_defaultsini=None,
+ passed_personalini=None):
+ if argv is None:
argv = sys.argv[1:]
# read in args, anything starting with -- will be treated as --=
if not parser:
- parser = optionparser('usage: %prog [options] [storyurl]...')
+ parser = OptionParser('usage: %prog [options] [STORYURL]...')
parser.add_option('-f', '--format', dest='format', default='epub',
- help='write story as format, epub(default), mobi, txt or html', metavar='format')
+ help='write story as FORMAT, epub(default), mobi, txt or html', metavar='FORMAT')
if passed_defaultsini:
config_help = 'read config from specified file(s) in addition to calibre plugin personal.ini, ~/.fanficfare/personal.ini, and ./personal.ini'
else:
config_help = 'read config from specified file(s) in addition to ~/.fanficfare/defaults.ini, ~/.fanficfare/personal.ini, ./defaults.ini, and ./personal.ini'
parser.add_option('-c', '--config',
- action='append', dest='configfile', default=none,
- help=config_help, metavar='config')
- range_help = ' --begin and --end will be overridden by a chapter range on the storyurl like storyurl[1-2], storyurl[-3], storyurl[3-] or storyurl[3]'
- parser.add_option('-b', '--begin', dest='begin', default=none,
- help='begin with chapter start.'+range_help, metavar='start')
- parser.add_option('-e', '--end', dest='end', default=none,
- help='end with chapter end.'+range_help, metavar='end')
+ action='append', dest='configfile', default=None,
+ help=config_help, metavar='CONFIG')
+ range_help = ' --begin and --end will be overridden by a chapter range on the STORYURL like STORYURL[1-2], STORYURL[-3], STORYURL[3-] or STORYURL[3]'
+ parser.add_option('-b', '--begin', dest='begin', default=None,
+ help='Begin with Chapter START.'+range_help, metavar='START')
+ parser.add_option('-e', '--end', dest='end', default=None,
+ help='End with Chapter END.'+range_help, metavar='END')
parser.add_option('-o', '--option',
action='append', dest='options',
- help='set an option name=value', metavar='name=value')
+ help='set an option NAME=VALUE', metavar='NAME=VALUE')
parser.add_option('-m', '--meta-only',
action='store_true', dest='metaonly',
- help='retrieve metadata and stop. or, if --update-epub, update metadata title page only.', )
+ help='Retrieve metadata and stop. Or, if --update-epub, update metadata title page only.', )
parser.add_option('--json-meta',
action='store_true', dest='jsonmeta',
- help='when used with --meta-only, output metadata as json. no effect without --meta-only flag', )
+ help='When used with --meta-only, output metadata as JSON. No effect without --meta-only flag', )
parser.add_option('-u', '--update-epub',
action='store_true', dest='update',
- help='update an existing epub(if present) with new chapters. give either epub filename or story url.', )
+ help='Update an existing epub(if present) with new chapters. Give either epub filename or story URL.', )
parser.add_option('--update-cover',
action='store_true', dest='updatecover',
- help='update cover in an existing epub, otherwise existing cover (if any) is used on update. only valid with --update-epub.', )
+ help='Update cover in an existing epub, otherwise existing cover (if any) is used on update. Only valid with --update-epub.', )
parser.add_option('--unnew',
action='store_true', dest='unnew',
- help='remove (new) chapter marks left by mark_new_chapters setting.', )
+ help='Remove (new) chapter marks left by mark_new_chapters setting.', )
parser.add_option('--force',
action='store_true', dest='force',
- help='force overwrite of an existing epub, download and overwrite all chapters.', )
+ help='Force overwrite of an existing epub, download and overwrite all chapters.', )
parser.add_option('-i', '--infile',
- help='give a filename to read for urls (and/or existing epub files with --update-epub).',
- dest='infile', default=none,
- metavar='infile')
+ help='Give a filename to read for URLs (and/or existing EPUB files with --update-epub).',
+ dest='infile', default=None,
+ metavar='INFILE')
parser.add_option('-l', '--list',
- dest='list', default=none, metavar='url',
- help='get list of valid story urls from page given.', )
+ dest='list', default=None, metavar='URL',
+ help='Get list of valid story URLs from page given.', )
parser.add_option('-n', '--normalize-list',
- dest='normalize', default=none, metavar='url',
- help='get list of valid story urls from page given, but normalized to standard forms.', )
+ dest='normalize', default=None, metavar='URL',
+ help='Get list of valid story URLs from page given, but normalized to standard forms.', )
parser.add_option('--download-list',
- dest='downloadlist', default=none, metavar='url',
- help='download story urls retrieved from page given. update existing epubs if used with --update-epub.', )
+ dest='downloadlist', default=None, metavar='URL',
+ help='Download story URLs retrieved from page given. Update existing EPUBs if used with --update-epub.', )
parser.add_option('--imap',
action='store_true', dest='imaplist',
- help='get list of valid story urls from unread email from imap account configured in ini.', )
+ help='Get list of valid story URLs from unread email from IMAP account configured in ini.', )
parser.add_option('--download-imap',
action='store_true', dest='downloadimap',
- help='download valid story urls from unread email from imap account configured in ini. update existing epubs if used with --update-epub.', )
+ help='Download valid story URLs from unread email from IMAP account configured in ini. Update existing EPUBs if used with --update-epub.', )
parser.add_option('-s', '--sites-list',
- action='store_true', dest='siteslist', default=false,
- help='get list of valid story urls examples.', )
+ action='store_true', dest='siteslist', default=False,
+ help='Get list of valid story URLs examples.', )
parser.add_option('--non-interactive',
action='store_false', dest='interactive', default=sys.stdin.isatty() and sys.stdout.isatty(),
- help='prevent interactive prompts (for scripting).', )
+ help='Prevent interactive prompts (for scripting).', )
parser.add_option('-d', '--debug',
action='store_true', dest='debug',
- help='show debug and notice output.', )
+ help='Show debug and notice output.', )
parser.add_option('-p', '--progressbar',
action='store_true', dest='progressbar',
- help='display a simple progress bar while downloading--one dot(.) per network fetch.', )
+ help='Display a simple progress bar while downloading--one dot(.) per network fetch.', )
parser.add_option('-v', '--version',
action='store_true', dest='version',
- help='display version and quit.', )
+ help='Display version and quit.', )
- ## undocumented feature for development use. save page cache and
- ## cookies between runs. saves in pwd as files global_cache and
+ ## undocumented feature for development use. Save page cache and
+ ## cookies between runs. Saves in PWD as files global_cache and
## global_cookies
parser.add_option('--save-cache', '--save_cache',
action='store_true', dest='save_cache',
- help=suppress_help, )
+ help=SUPPRESS_HELP, )
options, args = parser.parse_args(argv)
if options.version:
- print("version: %s" % version)
+ print("Version: %s" % version)
return
if not options.debug:
- logger.setlevel(logging.warning)
+ logger.setLevel(logging.WARNING)
list_only = any((options.imaplist,
options.siteslist,
@@ -175,11 +175,11 @@ def main(argv=none,
if list_only and (args or any((options.downloadimap,
options.downloadlist))):
- parser.error('incorrect arguments: cannot download and list urls at the same time.')
+ parser.error('Incorrect arguments: Cannot download and list URLs at the same time.')
if options.siteslist:
- for site, examples in adapters.getsiteexamples():
- print('\n#### %s\nexample urls:' % site)
+ for site, examples in adapters.getSiteExamples():
+ print('\n#### %s\nExample URLs:' % site)
for u in examples:
print(' * %s' % u)
return
@@ -209,7 +209,7 @@ def main(argv=none,
configuration = get_configuration(options.normalize,
passed_defaultsini,
passed_personalini,options)
- retlist = get_urls_from_page(options.normalize, configuration,normalize=true)
+ retlist = get_urls_from_page(options.normalize, configuration,normalize=True)
print('\n'.join(retlist))
if options.downloadlist:
@@ -222,12 +222,12 @@ def main(argv=none,
if options.imaplist or options.downloadimap:
# list doesn't have a supported site.
configuration = get_configuration('test1.com',passed_defaultsini,passed_personalini,options)
- markread = configuration.getconfig('imap_mark_read') == 'true' or \
- (configuration.getconfig('imap_mark_read') == 'downloadonly' and options.downloadimap)
- retlist = get_urls_from_imap(configuration.getconfig('imap_server'),
- configuration.getconfig('imap_username'),
- configuration.getconfig('imap_password'),
- configuration.getconfig('imap_folder'),
+ markread = configuration.getConfig('imap_mark_read') == 'true' or \
+ (configuration.getConfig('imap_mark_read') == 'downloadonly' and options.downloadimap)
+ retlist = get_urls_from_imap(configuration.getConfig('imap_server'),
+ configuration.getConfig('imap_username'),
+ configuration.getConfig('imap_password'),
+ configuration.getConfig('imap_folder'),
markread)
if options.downloadimap:
@@ -251,14 +251,14 @@ def main(argv=none,
try:
with open('global_cache','rb') as jin:
options.pagecache = pickle.load(jin) # ,encoding="utf-8"
- options.cookiejar = cl.lwpcookiejar()
+ options.cookiejar = cl.LWPCookieJar()
options.cookiejar.load('global_cookies')
except:
print("didn't load global_cache")
if not list_only:
if len(urls) < 1:
- print("no valid story urls found")
+ print("No valid story URLs found")
else:
for url in urls:
try:
@@ -267,10 +267,10 @@ def main(argv=none,
passed_defaultsini,
passed_personalini)
#print("pagecache:%s"%options.pagecache.keys())
- except exception as e:
+ except Exception as e:
if len(urls) == 1:
raise
- print("url(%s) failed: exception (%s). run url individually for more detail."%(url,e))
+ print("URL(%s) Failed: Exception (%s). Run URL individually for more detail."%(url,e))
if options.save_cache:
with open('global_cache','wb') as jout:
@@ -283,9 +283,9 @@ def do_download(arg,
passed_defaultsini,
passed_personalini):
- # attempt to update an existing epub.
- chaptercount = none
- output_filename = none
+ # Attempt to update an existing epub.
+ chaptercount = None
+ output_filename = None
if options.unnew:
# remove mark_new_chapters marks
@@ -296,12 +296,12 @@ def do_download(arg,
try:
url, chaptercount = get_dcsource_chaptercount(arg)
if not url:
- print('no story url found in epub to update.')
+ print('No story URL found in epub to update.')
return
- print('updating %s, url: %s' % (arg, url))
+ print('Updating %s, URL: %s' % (arg, url))
output_filename = arg
- except exception:
- # if there's an error reading the update file, maybe it's a url?
+ except Exception:
+ # if there's an error reading the update file, maybe it's a URL?
# we'll look for an existing outputfile down below.
url = arg
else:
@@ -315,14 +315,14 @@ def do_download(arg,
output_filename)
try:
- # allow chapter range with url.
+ # Allow chapter range with URL.
# like test1.com?sid=5[4-6] or [4,6]
- # overrides cli options if present.
+ # Overrides CLI options if present.
url,ch_begin,ch_end = adapters.get_url_chapter_range(url)
- adapter = adapters.getadapter(configuration, url)
+ adapter = adapters.getAdapter(configuration, url)
- ## share pagecache and cookiejar between multiple downloads.
+ ## Share pagecache and cookiejar between multiple downloads.
if not hasattr(options,'pagecache'):
options.pagecache = configuration.get_empty_pagecache()
if not hasattr(options,'cookiejar'):
@@ -330,36 +330,36 @@ def do_download(arg,
configuration.set_pagecache(options.pagecache)
configuration.set_cookiejar(options.cookiejar)
- # url[begin-end] overrides cli option if present.
+ # url[begin-end] overrides CLI option if present.
if ch_begin or ch_end:
- adapter.setchaptersrange(ch_begin, ch_end)
+ adapter.setChaptersRange(ch_begin, ch_end)
else:
- adapter.setchaptersrange(options.begin, options.end)
+ adapter.setChaptersRange(options.begin, options.end)
- # check for updating from url (vs from file)
+ # check for updating from URL (vs from file)
if options.update and not chaptercount:
try:
- writer = writers.getwriter('epub', configuration, adapter)
- output_filename = writer.getoutputfilename()
+ writer = writers.getWriter('epub', configuration, adapter)
+ output_filename = writer.getOutputFileName()
noturl, chaptercount = get_dcsource_chaptercount(output_filename)
- print('updating %s, url: %s' % (output_filename, url))
- except exception:
- options.update = false
+ print('Updating %s, URL: %s' % (output_filename, url))
+ except Exception:
+ options.update = False
pass
- # check for include_images without no_image_processing. in absence of pil, give warning.
- if adapter.getconfig('include_images') and not adapter.getconfig('no_image_processing'):
+ # Check for include_images without no_image_processing. In absence of PIL, give warning.
+ if adapter.getConfig('include_images') and not adapter.getConfig('no_image_processing'):
try:
- from calibre.utils.magick import image
- except importerror:
+ from calibre.utils.magick import Image
+ except ImportError:
try:
- ## pillow is a more current fork of pil library
- from pil import image
- except importerror:
+ ## Pillow is a more current fork of PIL library
+ from PIL import Image
+ except ImportError:
try:
- import image
- except importerror:
- print("you have include_images enabled, but python image library(pil) isn't found.\nimages will be included full size in original format.\ncontinue? (y/n)?")
+ import Image
+ except ImportError:
+ print("You have include_images enabled, but Python Image Library(PIL) isn't found.\nImages will be included full size in original format.\nContinue? (y/n)?")
if options.interactive:
if not sys.stdin.readline().strip().lower().startswith('y'):
return
@@ -371,39 +371,39 @@ def do_download(arg,
# or a couple tries of one or the other
for x in range(0, 2):
try:
- adapter.getstorymetadataonly()
- except exceptions.failedtologin as f:
+ adapter.getStoryMetadataOnly()
+ except exceptions.FailedToLogin as f:
if not options.interactive:
- print('login failed on non-interactive process. set username and password in personal.ini.')
+ print('Login Failed on non-interactive process. Set username and password in personal.ini.')
return
if f.passwdonly:
- print('story requires a password.')
+ print('Story requires a password.')
else:
- print('login failed, need username/password.')
- sys.stdout.write('username: ')
+ print('Login Failed, Need Username/Password.')
+ sys.stdout.write('Username: ')
adapter.username = sys.stdin.readline().strip()
- adapter.password = getpass.getpass(prompt='password: ')
- # print('login: `%s`, password: `%s`' % (adapter.username, adapter.password))
- except exceptions.adultcheckrequired:
+ adapter.password = getpass.getpass(prompt='Password: ')
+ # print('Login: `%s`, Password: `%s`' % (adapter.username, adapter.password))
+ except exceptions.AdultCheckRequired:
if options.interactive:
- print('please confirm you are an adult in your locale: (y/n)?')
+ print('Please confirm you are an adult in your locale: (y/n)?')
if sys.stdin.readline().strip().lower().startswith('y'):
- adapter.is_adult = true
+ adapter.is_adult = True
else:
- print('adult check required on non-interactive process. set is_adult:true in personal.ini or pass -o "is_adult=true" to the command.')
+ print('Adult check required on non-interactive process. Set is_adult:true in personal.ini or pass -o "is_adult=true" to the command.')
return
if options.update and not options.force:
- urlchaptercount = int(adapter.getstorymetadataonly().getmetadata('numchapters').replace(',',''))
+ urlchaptercount = int(adapter.getStoryMetadataOnly().getMetadata('numChapters').replace(',',''))
# returns int adjusted for start-end range.
- urlchaptercount = adapter.getstorymetadataonly().getchaptercount()
+ urlchaptercount = adapter.getStoryMetadataOnly().getChapterCount()
if chaptercount == urlchaptercount and not options.metaonly:
print('%s already contains %d chapters.' % (output_filename, chaptercount))
elif chaptercount > urlchaptercount:
print('%s contains %d chapters, more than source: %d.' % (output_filename, chaptercount, urlchaptercount))
elif chaptercount == 0:
- print("%s doesn't contain any recognizable chapters, probably from a different source. not updating." % output_filename)
+ print("%s doesn't contain any recognizable chapters, probably from a different source. Not updating." % output_filename)
else:
# update now handled by pre-populating the old
# images and chapters in the adapter rather than
@@ -418,33 +418,33 @@ def do_download(arg,
adapter.oldchaptersmap,
adapter.oldchaptersdata) = (get_update_data(output_filename))[0:9]
- print('do update - epub(%d) vs url(%d)' % (chaptercount, urlchaptercount))
+ print('Do update - epub(%d) vs url(%d)' % (chaptercount, urlchaptercount))
- if not options.update and chaptercount == urlchaptercount and adapter.getconfig('do_update_hook'):
- adapter.hookforupdates(chaptercount)
+ if not options.update and chaptercount == urlchaptercount and adapter.getConfig('do_update_hook'):
+ adapter.hookForUpdates(chaptercount)
- if adapter.getconfig('pre_process_safepattern'):
- metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getconfig('pre_process_safepattern'))
+ if adapter.getConfig('pre_process_safepattern'):
+ metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getConfig('pre_process_safepattern'))
else:
- metadata = adapter.story.getallmetadata()
- call(string.template(adapter.getconfig('pre_process_cmd')).substitute(metadata), shell=true)
+ metadata = adapter.story.getAllMetadata()
+ call(string.Template(adapter.getConfig('pre_process_cmd')).substitute(metadata), shell=True)
write_story(configuration, adapter, 'epub')
else:
# regular download
if options.metaonly:
- metadata = adapter.getstorymetadataonly().getallmetadata()
+ metadata = adapter.getStoryMetadataOnly().getAllMetadata()
metadata['zchapters'] = []
for i, chap in enumerate(adapter.get_chapters()):
metadata['zchapters'].append((i+1,chap))
- if not options.metaonly and adapter.getconfig('pre_process_cmd'):
- if adapter.getconfig('pre_process_safepattern'):
- metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getconfig('pre_process_safepattern'))
+ if not options.metaonly and adapter.getConfig('pre_process_cmd'):
+ if adapter.getConfig('pre_process_safepattern'):
+ metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getConfig('pre_process_safepattern'))
else:
- metadata = adapter.story.getallmetadata()
- call(string.template(adapter.getconfig('pre_process_cmd')).substitute(metadata), shell=true)
+ metadata = adapter.story.getAllMetadata()
+ call(string.Template(adapter.getConfig('pre_process_cmd')).substitute(metadata), shell=True)
output_filename = write_story(configuration, adapter, options.format, options.metaonly)
@@ -452,42 +452,42 @@ def do_download(arg,
metadata['output_filename'] = output_filename
if options.jsonmeta:
import json
- print(json.dumps(metadata, sort_keys=true,
+ print(json.dumps(metadata, sort_keys=True,
indent=2, separators=(',', ':')))
else:
pprint.pprint(metadata)
- if not options.metaonly and adapter.getconfig('post_process_cmd'):
- if adapter.getconfig('post_process_safepattern'):
- metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getconfig('post_process_safepattern'))
+ if not options.metaonly and adapter.getConfig('post_process_cmd'):
+ if adapter.getConfig('post_process_safepattern'):
+ metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getConfig('post_process_safepattern'))
else:
- metadata = adapter.story.getallmetadata()
+ metadata = adapter.story.getAllMetadata()
metadata['output_filename'] = output_filename
- call(string.template(adapter.getconfig('post_process_cmd')).substitute(metadata), shell=true)
+ call(string.Template(adapter.getConfig('post_process_cmd')).substitute(metadata), shell=True)
del adapter
- except exceptions.invalidstoryurl as isu:
+ except exceptions.InvalidStoryURL as isu:
print(isu)
- except exceptions.storydoesnotexist as dne:
+ except exceptions.StoryDoesNotExist as dne:
print(dne)
- except exceptions.unknownsite as us:
+ except exceptions.UnknownSite as us:
print(us)
- except exceptions.accessdenied as ad:
+ except exceptions.AccessDenied as ad:
print(ad)
def get_configuration(url,
passed_defaultsini,
passed_personalini,
options,
- chaptercount=none,
- output_filename=none):
+ chaptercount=None,
+ output_filename=None):
try:
- configuration = configuration(adapters.getconfigsectionsfor(url), options.format)
- except exceptions.unknownsite as e:
+ configuration = Configuration(adapters.getConfigSectionsFor(url), options.format)
+ except exceptions.UnknownSite as e:
if options.list or options.normalize or options.downloadlist:
# list for page doesn't have to be a supported site.
- configuration = configuration(['unknown'], options.format)
+ configuration = Configuration(['unknown'], options.format)
else:
raise e
@@ -497,9 +497,9 @@ def get_configuration(url,
homepath2 = join(expanduser('~'), '.fanficfare')
if passed_defaultsini:
- # new stringio each time rather than pass stringio and rewind
- # for case of list download. just makes more sense to me.
- configuration.readfp(stringio(passed_defaultsini))
+ # new StringIO each time rather than pass StringIO and rewind
+ # for case of list download. Just makes more sense to me.
+ configuration.readfp(StringIO(passed_defaultsini))
else:
# don't need to check existance for our selves.
conflist.append(join(dirname(__file__), 'defaults.ini'))
@@ -508,9 +508,9 @@ def get_configuration(url,
conflist.append('defaults.ini')
if passed_personalini:
- # new stringio each time rather than pass stringio and rewind
- # for case of list download. just makes more sense to me.
- configuration.readfp(stringio(passed_personalini))
+ # new StringIO each time rather than pass StringIO and rewind
+ # for case of list download. Just makes more sense to me.
+ configuration.readfp(StringIO(passed_personalini))
conflist.append(join(homepath, 'personal.ini'))
conflist.append(join(homepath2, 'personal.ini'))
diff --git a/fanficfare/configurable.py b/fanficfare/configurable.py
index 9cb62271..7f95d4f2 100644
--- a/fanficfare/configurable.py
+++ b/fanficfare/configurable.py
@@ -19,14 +19,15 @@ import re
import exceptions
import codecs
+import six
import six.moves.configparser as ConfigParser
from six.moves.configparser import DEFAULTSECT, MissingSectionHeaderError, ParsingError
from six.moves import urllib
from six.moves.urllib.request import (build_opener, HTTPCookieProcessor)
-from six.moves.urllib import parse as up
from six.moves import http_cookiejar as cl
# unicode in py2, str in py3
from six import text_type as unicode
+from six import string_types as basestring
import time
import logging
@@ -664,7 +665,8 @@ class Configuration(ConfigParser.SafeConfigParser):
# split and strip each.
def get_config_list(self, sections, key, default=[]):
vlist = re.split(r'(?'%entry
idx = logfile.rindex(span)+len(span)
values[entry] = logfile[idx:logfile.index('\n',idx)]
- except Exception, e:
+ except Exception as e:
#print("e:%s"%e)
pass
From e3ab18589ba9fe863430e252721fa927ac1ffb09 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Thu, 26 Jul 2018 16:44:54 -0500
Subject: [PATCH 007/120] Working towards python 2.7 & 3 cross compatibility.
---
fanficfare/adapters/base_adapter.py | 14 +++++++---
fanficfare/configurable.py | 2 +-
fanficfare/htmlcleanup.py | 7 ++++-
fanficfare/story.py | 42 ++++++++++++++++-------------
fanficfare/writers/base_writer.py | 4 +--
5 files changed, 43 insertions(+), 26 deletions(-)
diff --git a/fanficfare/adapters/base_adapter.py b/fanficfare/adapters/base_adapter.py
index bacd7b82..947ead1e 100644
--- a/fanficfare/adapters/base_adapter.py
+++ b/fanficfare/adapters/base_adapter.py
@@ -15,10 +15,15 @@
# limitations under the License.
#
+from __future__ import absolute_import
import re
from datetime import datetime, timedelta
from collections import defaultdict
+# py2 vs py3 transition
+from six import text_type as unicode
+from six import string_types as basestring
+
import logging
from six.moves.urllib.parse import urlparse
from functools import partial
@@ -46,7 +51,7 @@ class TimeKeeper(defaultdict):
self[name] = self[name] + td
def __unicode__(self):
- keys = self.keys()
+ keys = list(self.keys())
keys.sort()
return u"\n".join([ u"%s: %s"%(k,self[k]) for k in keys ])
import inspect
@@ -416,11 +421,11 @@ class BaseSiteAdapter(Configurable):
if hasattr(soup, '_getAttrMap') and getattr(soup, '_getAttrMap') is not None:
# bs3
#print "bs3 attrs:%s"%soup._getAttrMap().keys()
- return soup._getAttrMap().keys()
+ return list(soup._getAttrMap().keys())
elif hasattr(soup, 'attrs') and isinstance(soup.attrs,dict):
#print "bs4 attrs:%s"%soup.attrs.keys()
# bs4
- return soup.attrs.keys()
+ return list(soup.attrs.keys())
return []
# This gives us a unicode object, not just a string containing bytes.
@@ -599,7 +604,8 @@ def makeDate(string,dateform):
add_hours = True
string = string.replace(u"AM",u"").replace(u"PM",u"").replace(u"am",u"").replace(u"pm",u"")
- date = datetime.strptime(string.encode('utf-8'),dateform.encode('utf-8'))
+ # date = datetime.strptime(string.encode('utf-8'),dateform.encode('utf-8'))
+ date = datetime.strptime(string, dateform)
if add_hours:
date += timedelta(hours=12)
diff --git a/fanficfare/configurable.py b/fanficfare/configurable.py
index 7f95d4f2..4e061fd6 100644
--- a/fanficfare/configurable.py
+++ b/fanficfare/configurable.py
@@ -25,7 +25,7 @@ from six.moves.configparser import DEFAULTSECT, MissingSectionHeaderError, Parsi
from six.moves import urllib
from six.moves.urllib.request import (build_opener, HTTPCookieProcessor)
from six.moves import http_cookiejar as cl
-# unicode in py2, str in py3
+# py2 vs py3 transition
from six import text_type as unicode
from six import string_types as basestring
diff --git a/fanficfare/htmlcleanup.py b/fanficfare/htmlcleanup.py
index 22acab8f..910cc6ce 100644
--- a/fanficfare/htmlcleanup.py
+++ b/fanficfare/htmlcleanup.py
@@ -15,11 +15,16 @@
# limitations under the License.
#
+from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
+# py2 vs py3 transition
+from six import text_type as unicode
+from six import string_types as basestring
+
def _unirepl(match):
"Return the unicode string for a decimal number"
if match.group(1).startswith('x'):
@@ -80,7 +85,7 @@ def removeEntities(text, space_only=False):
return unicode(text)
try:
- t = text.decode('utf-8')
+ t = text #.decode('utf-8')
except (UnicodeEncodeError,UnicodeDecodeError) as e:
try:
t = text.encode ('ascii', 'xmlcharrefreplace')
diff --git a/fanficfare/story.py b/fanficfare/story.py
index 70852df9..294dd228 100644
--- a/fanficfare/story.py
+++ b/fanficfare/story.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2016 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import os, re
import copy
from collections import defaultdict
@@ -26,13 +27,18 @@ from math import floor
from functools import partial
import logging
logger = logging.getLogger(__name__)
+# py2 vs py3 transition
+import six
+from six import text_type as unicode
+from six import string_types as basestring
+from six.moves import map
import bs4
-import exceptions
-from htmlcleanup import conditionalRemoveEntities, removeEntities, removeAllEntities
-from configurable import Configurable, re_compile
-from htmlheuristics import was_run_marker
+from . import exceptions
+from .htmlcleanup import conditionalRemoveEntities, removeEntities, removeAllEntities
+from .configurable import Configurable, re_compile
+from .htmlheuristics import was_run_marker
SPACE_REPLACE=u'\s'
SPLIT_META=u'\,'
@@ -50,7 +56,7 @@ imagetypes = {
try:
from calibre.utils.magick import Image
- from StringIO import StringIO
+ from six import StringIO
from gif import GifInfo, CHECK_IS_ANIMATED
convtype = {'jpg':'JPG', 'png':'PNG'}
@@ -99,7 +105,7 @@ except:
# No calibre routines, try for PIL for CLI.
try:
import Image
- from StringIO import StringIO
+ from six import StringIO
convtype = {'jpg':'JPEG', 'png':'PNG'}
def convert_image(url,data,sizes,grayscale,
removetrans,imgtype="jpg",background='#ffffff'):
@@ -331,7 +337,7 @@ class InExMatch:
(self.keys,self.match) = line.split("!=")
self.match = self.match.replace(SPACE_REPLACE,' ')
self.negate = True
- self.keys = map( lambda x: x.strip(), self.keys.split(",") )
+ self.keys = [x.strip() for x in self.keys.split(",")]
# For conditional, only one key
def is_key(self,key):
@@ -405,7 +411,7 @@ def make_replacements(replace):
if "=>" in line:
parts = line.split("=>")
if len(parts) > 2:
- metakeys = map( lambda x: x.strip(), parts[0].split(",") )
+ metakeys = [x.strip() for x in parts[0].split(",")]
(regexp,replacement)=parts[1:]
else:
(regexp,replacement)=parts
@@ -607,8 +613,8 @@ class Story(Configurable):
raise
for val in retlist:
- retlist = map(partial(self.do_in_ex_clude,'include_metadata_post',key=key),retlist)
- retlist = map(partial(self.do_in_ex_clude,'exclude_metadata_post',key=key),retlist)
+ retlist = list(map(partial(self.do_in_ex_clude,'include_metadata_post',key=key),retlist))
+ retlist = list(map(partial(self.do_in_ex_clude,'exclude_metadata_post',key=key),retlist))
if return_list:
return retlist
@@ -618,7 +624,7 @@ class Story(Configurable):
# for saving an html-ified copy of metadata.
def dump_html_metadata(self):
lines=[]
- for k,v in sorted(self.metadata.iteritems()):
+ for k,v in sorted(six.iteritems(self.metadata)):
classes=['metadata']
if isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
classes.append("datetime")
@@ -688,7 +694,7 @@ class Story(Configurable):
return value
def getMetadataRaw(self,key):
- if self.isValidMetaEntry(key) and self.metadata.has_key(key):
+ if self.isValidMetaEntry(key) and key in self.metadata:
return self.metadata[key]
def getMetadata(self, key,
@@ -710,7 +716,7 @@ class Story(Configurable):
value = self.join_list(key,self.getList(key, removeallentities, doreplacements=True))
if doreplacements:
value = self.doReplacements(value,key+"_LIST")
- elif self.metadata.has_key(key):
+ elif key in self.metadata:
value = self.metadata[key]
if value:
if key in ["numWords","numChapters"]+self.getConfigList("comma_entries",[]):
@@ -867,7 +873,7 @@ class Story(Configurable):
def isList(self,listname):
'Everything set with an include_in_* is considered a list.'
return self.isListType(listname) or \
- ( self.isValidMetaEntry(listname) and self.metadata.has_key(listname) \
+ ( self.isValidMetaEntry(listname) and listname in self.metadata \
and isinstance(self.metadata[listname],list) )
def getList(self,listname,
@@ -947,9 +953,9 @@ class Story(Configurable):
retlist = newretlist
if removeallentities:
- retlist = map(removeAllEntities,retlist)
+ retlist = list(map(removeAllEntities,retlist))
- retlist = filter( lambda x : x!=None and x!='' ,retlist)
+ retlist = [x for x in retlist if x!=None and x!='']
if listname == 'genre' and self.getConfig('add_genre_when_multi_category') and len(self.getList('category',
removeallentities=False,
@@ -983,7 +989,7 @@ class Story(Configurable):
tags_list = self.getConfigList("include_subject_tags") + self.getConfigList("extra_subject_tags")
# metadata all go into dc:subject tags, but only if they are configured.
- for (name,value) in self.getAllMetadata(removeallentities=removeallentities,keeplists=True).iteritems():
+ for (name,value) in six.iteritems(self.getAllMetadata(removeallentities=removeallentities,keeplists=True)):
if name+'.SPLIT' in tags_list:
flist=[]
if isinstance(value,list):
diff --git a/fanficfare/writers/base_writer.py b/fanficfare/writers/base_writer.py
index 2ed564ab..3e97a356 100644
--- a/fanficfare/writers/base_writer.py
+++ b/fanficfare/writers/base_writer.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -213,7 +213,7 @@ class BaseStoryWriter(Configurable):
# above, it will only
# fetch once.
if self.getConfig('zip_output'):
- out = StringIO.StringIO()
+ out = StringIO()
self.zipout = ZipFile(outstream, 'w', compression=ZIP_DEFLATED)
self.writeStoryImpl(out)
self.zipout.writestr(self.getBaseFileName(),out.getvalue())
From 0b9ea4bebb0ec40deed87546e0777599e1daa5b1 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Thu, 26 Jul 2018 17:26:24 -0500
Subject: [PATCH 008/120] test1.com with epub/txt/html output working, mobi
broken.
---
fanficfare/mobi.py | 12 +++++++++---
fanficfare/mobihtml.py | 13 ++++++++-----
fanficfare/writers/base_writer.py | 9 ++++++---
fanficfare/writers/writer_epub.py | 29 ++++++++++++++++++-----------
fanficfare/writers/writer_mobi.py | 14 ++++++++++----
5 files changed, 51 insertions(+), 26 deletions(-)
diff --git a/fanficfare/mobi.py b/fanficfare/mobi.py
index 76bae5e8..adc95096 100644
--- a/fanficfare/mobi.py
+++ b/fanficfare/mobi.py
@@ -1,13 +1,18 @@
#!/usr/bin/python
# Copyright(c) 2009 Andrew Chatham and Vijay Pandurangan
+# Changes Copyright 2018 FanFicFare team
-from six import StringIO
import struct
import time
import random
import logging
+# py2 vs py3 transition
+from six import text_type as unicode
+from six import string_types as basestring
+from six import BytesIO # StringIO under py2
+
logger = logging.getLogger(__name__)
from mobihtml import HtmlProcessor
@@ -57,12 +62,12 @@ class Converter:
self._refresh_url = refresh_url
def ConvertString(self, s):
- out = StringIO.StringIO()
+ out = BytesIO()
self._ConvertStringToFile(s, out)
return out.getvalue()
def ConvertStrings(self, html_strs):
- out = StringIO.StringIO()
+ out = BytesIO()
self._ConvertStringsToFile(html_strs, out)
return out.getvalue()
@@ -126,6 +131,7 @@ class Converter:
tmp = self.MakeOneHTML(html_strs)
self._ConvertStringToFile(tmp, out_file)
except Exception as e:
+ raise
logger.error('Error %s', e)
#logger.debug('Details: %s' % html_strs)
diff --git a/fanficfare/mobihtml.py b/fanficfare/mobihtml.py
index 7782a43c..c0dcf029 100644
--- a/fanficfare/mobihtml.py
+++ b/fanficfare/mobihtml.py
@@ -5,8 +5,11 @@
import re
import sys
-from six import StringIO
-from six.moves import urllib
+from six.moves.urllib.parse import unquote
+
+# py2 vs py3 transition
+from six import text_type as unicode
+from six import binary_type as bytes
# import bs4
# BeautifulSoup = bs4.BeautifulSoup
@@ -55,14 +58,14 @@ class HtmlProcessor:
# str() instead of unicode() rather than figure out how to fix
# ancient mobi.py code.
- assembled_text = str(self._soup)
+ assembled_text = unicode(self._soup)
del self._soup # shouldn't touch this anymore
for anchor_num, original_ref in self._anchor_references:
- ref = urllib.unquote(original_ref[1:]) # remove leading '#'
+ ref = unquote(original_ref[1:]) # remove leading '#'
# Find the position of ref in the utf-8 document.
# TODO(chatham): Using regexes and looking for name= would be better.
- newpos = assembled_text.rfind(ref.encode('utf-8'))
+ newpos = assembled_text.rfind(ref) # .encode('utf-8')
if newpos == -1:
print >>sys.stderr, 'Could not find anchor "%s"' % original_ref
continue
diff --git a/fanficfare/writers/base_writer.py b/fanficfare/writers/base_writer.py
index 3e97a356..12eada78 100644
--- a/fanficfare/writers/base_writer.py
+++ b/fanficfare/writers/base_writer.py
@@ -21,11 +21,14 @@ import os.path
import datetime
import string
import six
-from six import StringIO
import zipfile
from zipfile import ZipFile, ZIP_DEFLATED
import logging
+# py2 vs py3 transition
+from six import text_type as unicode
+from six import BytesIO # StringIO under py2
+
from ..configurable import Configurable
from ..htmlcleanup import removeEntities, removeAllEntities, stripHTML
@@ -179,7 +182,7 @@ class BaseStoryWriter(Configurable):
logger.info("Save directly to file: %s" % outfilename)
if self.getConfig('make_directories'):
path=""
- outputdirs = os.path.dirname(outfilename).split('/')
+ outputdirs = os.path.dirname(unicode(outfilename)).split('/')
for dir in outputdirs:
path+=dir+"/"
if not os.path.exists(path):
@@ -213,7 +216,7 @@ class BaseStoryWriter(Configurable):
# above, it will only
# fetch once.
if self.getConfig('zip_output'):
- out = StringIO()
+ out = BytesIO()
self.zipout = ZipFile(outstream, 'w', compression=ZIP_DEFLATED)
self.writeStoryImpl(out)
self.zipout.writestr(self.getBaseFileName(),out.getvalue())
diff --git a/fanficfare/writers/writer_epub.py b/fanficfare/writers/writer_epub.py
index 8ecfeda0..93bad44f 100644
--- a/fanficfare/writers/writer_epub.py
+++ b/fanficfare/writers/writer_epub.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,14 +15,19 @@
# limitations under the License.
#
+from __future__ import absolute_import
import logging
import string
-from six import StringIO
import zipfile
from zipfile import ZipFile, ZIP_STORED, ZIP_DEFLATED
import urllib
import re
+# py2 vs py3 transition
+from six import text_type as unicode
+from six import string_types as basestring
+from six import BytesIO # StringIO under py2
+
## XML isn't as forgiving as HTML, so rather than generate as strings,
## use DOM to generate the XML files.
from xml.dom.minidom import parse, parseString, getDOMImplementation
@@ -299,11 +304,11 @@ div { margin: 0pt; padding: 0pt; }
def writeStoryImpl(self, out):
## Python 2.5 ZipFile is rather more primative than later
- ## versions. It can operate on a file, or on a StringIO, but
+ ## versions. It can operate on a file, or on a BytesIO, but
## not on an open stream. OTOH, I suspect we would have had
## problems with closing and opening again to change the
## compression type anyway.
- zipio = StringIO()
+ zipio = BytesIO()
## mimetype must be first file and uncompressed. Python 2.5
## ZipFile can't change compression type file-by-file, so we
@@ -518,8 +523,8 @@ div { margin: 0pt; padding: 0pt; }
COVER = string.Template(self.getConfig("cover_content"))
else:
COVER = self.EPUB_COVER
- coverIO = StringIO()
- coverIO.write(COVER.substitute(dict(self.story.getAllMetadata().items()+{'coverimg':self.story.cover}.items())))
+ coverIO = BytesIO()
+ coverIO.write(COVER.substitute(dict(list(self.story.getAllMetadata().items())+list({'coverimg':self.story.cover}.items()))))
if self.getConfig("include_titlepage"):
items.append(("title_page","OEBPS/title_page.xhtml","application/xhtml+xml","Title Page"))
@@ -577,8 +582,10 @@ div { margin: 0pt; padding: 0pt; }
contentxml = contentdom.toxml(encoding='utf-8')
# tweak for brain damaged Nook STR. Nook insists on name before content.
- contentxml = contentxml.replace(''%coverimgid,
- ''%coverimgid)
+ contentxml = unicode(contentxml).replace(''%coverimgid,
+ ''%coverimgid)
+
+
outputepub.writestr("content.opf",contentxml)
contentdom.unlink()
@@ -655,7 +662,7 @@ div { margin: 0pt; padding: 0pt; }
outputepub.writestr("OEBPS/cover.xhtml",coverIO.getvalue())
coverIO.close()
- titlepageIO = StringIO()
+ titlepageIO = BytesIO()
self.writeTitlePage(out=titlepageIO,
START=TITLE_PAGE_START,
ENTRY=TITLE_ENTRY,
@@ -667,7 +674,7 @@ div { margin: 0pt; padding: 0pt; }
titlepageIO.close()
# write toc page.
- tocpageIO = StringIO()
+ tocpageIO = BytesIO()
self.writeTOCPage(tocpageIO,
self.EPUB_TOC_PAGE_START,
self.EPUB_TOC_ENTRY,
@@ -678,7 +685,7 @@ div { margin: 0pt; padding: 0pt; }
if dologpage:
# write log page.
- logpageIO = StringIO()
+ logpageIO = BytesIO()
self.writeLogPage(logpageIO)
outputepub.writestr("OEBPS/log_page.xhtml",logpageIO.getvalue())
logpageIO.close()
diff --git a/fanficfare/writers/writer_mobi.py b/fanficfare/writers/writer_mobi.py
index 97fdfcbc..ec7a2599 100644
--- a/fanficfare/writers/writer_mobi.py
+++ b/fanficfare/writers/writer_mobi.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,15 +15,21 @@
# limitations under the License.
#
+from __future__ import absolute_import
import logging
import string
-from six import StringIO
from .base_writer import *
from ..htmlcleanup import stripHTML
from ..mobi import Converter
from ..exceptions import FailedToWriteOutput
+# py2 vs py3 transition
+from six import text_type as unicode
+from six import string_types as basestring
+from six import binary_type as bytes
+from six import BytesIO # StringIO under py2
+
logger = logging.getLogger(__name__)
class MobiWriter(BaseStoryWriter):
@@ -128,7 +134,7 @@ ${value}
NO_TITLE_ENTRY = self.MOBI_NO_TITLE_ENTRY
TITLE_PAGE_END = self.MOBI_TITLE_PAGE_END
- titlepageIO = StringIO.StringIO()
+ titlepageIO = BytesIO()
self.writeTitlePage(out=titlepageIO,
START=TITLE_PAGE_START,
ENTRY=TITLE_ENTRY,
@@ -142,7 +148,7 @@ ${value}
## MOBI always has a TOC injected by mobi.py because there's
## no meta-data TOC.
# # write toc page.
- # tocpageIO = StringIO.StringIO()
+ # tocpageIO = BytesIO()
# self.writeTOCPage(tocpageIO,
# self.MOBI_TOC_PAGE_START,
# self.MOBI_TOC_ENTRY,
From 04d77dd21429c2b6f6633d1d7fa4c75f5ed148e6 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Thu, 26 Jul 2018 17:39:08 -0500
Subject: [PATCH 009/120] Dual compatible cli.py.
---
fanficfare/adapters/__init__.py | 2 -
fanficfare/cli.py | 80 ++---
fanficfare/cli3.py | 554 --------------------------------
3 files changed, 40 insertions(+), 596 deletions(-)
delete mode 100644 fanficfare/cli3.py
diff --git a/fanficfare/adapters/__init__.py b/fanficfare/adapters/__init__.py
index cad90e3e..80a29cd5 100644
--- a/fanficfare/adapters/__init__.py
+++ b/fanficfare/adapters/__init__.py
@@ -23,10 +23,8 @@ from six.moves.urllib.parse import urlparse
logger = logging.getLogger(__name__)
-print(sys.path)
from .. import exceptions as exceptions
-
## must import each adapter here.
from . import adapter_test1
diff --git a/fanficfare/cli.py b/fanficfare/cli.py
index 6f9f5a7d..7e79f9db 100644
--- a/fanficfare/cli.py
+++ b/fanficfare/cli.py
@@ -19,8 +19,8 @@ from optparse import OptionParser, SUPPRESS_HELP
from os.path import expanduser, join, dirname
from os import access, R_OK
from subprocess import call
-from StringIO import StringIO
-import ConfigParser
+from six import StringIO
+from six.moves import configparser
import getpass
import logging
import pprint
@@ -28,14 +28,13 @@ import string
import os, sys
import pickle
-import cookielib as cl
+from six.moves import http_cookiejar as cl
version="2.27.12"
os.environ['CURRENT_VERSION_ID']=version
-print("Python Version:%s"%sys.version)
-if sys.version_info < (2, 5) or sys.version_info > (3,0):
- print('This program requires Python 2.5 or newer. Python 3 is not supported.')
+if sys.version_info < (2, 5):
+ print('This program requires Python 2.5 or newer.')
sys.exit(1)
if sys.version_info >= (2, 7):
@@ -73,6 +72,7 @@ def main(argv=None,
parser=None,
passed_defaultsini=None,
passed_personalini=None):
+ logger.debug("Python Version:%s"%sys.version)
if argv is None:
argv = sys.argv[1:]
# read in args, anything starting with -- will be treated as --=
@@ -180,9 +180,9 @@ def main(argv=None,
if options.siteslist:
for site, examples in adapters.getSiteExamples():
- print '\n#### %s\nExample URLs:' % site
+ print('\n#### %s\nExample URLs:' % site)
for u in examples:
- print ' * %s' % u
+ print(' * %s' % u)
return
if options.update and options.format != 'epub':
@@ -204,14 +204,14 @@ def main(argv=None,
passed_defaultsini,
passed_personalini,options)
retlist = get_urls_from_page(options.list, configuration)
- print '\n'.join(retlist)
+ print('\n'.join(retlist))
if options.normalize:
configuration = get_configuration(options.normalize,
passed_defaultsini,
passed_personalini,options)
retlist = get_urls_from_page(options.normalize, configuration,normalize=True)
- print '\n'.join(retlist)
+ print('\n'.join(retlist))
if options.downloadlist:
configuration = get_configuration(options.downloadlist,
@@ -234,18 +234,18 @@ def main(argv=None,
if options.downloadimap:
urls.extend(retlist)
else:
- print '\n'.join(retlist)
+ print('\n'.join(retlist))
# for passing in a file list
if options.infile:
with open(options.infile,"r") as infile:
- #print "File exists and is readable"
+ #print("file exists and is readable")
for url in infile:
if '#' in url:
url = url[:url.find('#')].strip()
url = url.strip()
if len(url) > 0:
- #print "URL: (%s)"%url
+ #print("url: (%s)"%url)
urls.append(url)
if options.save_cache:
@@ -255,11 +255,11 @@ def main(argv=None,
options.cookiejar = cl.LWPCookieJar()
options.cookiejar.load('global_cookies')
except:
- print("Didn't load global_cache")
+ print("didn't load global_cache")
if not list_only:
if len(urls) < 1:
- print "No valid story URLs found"
+ print("No valid story URLs found")
else:
for url in urls:
try:
@@ -268,10 +268,10 @@ def main(argv=None,
passed_defaultsini,
passed_personalini)
#print("pagecache:%s"%options.pagecache.keys())
- except Exception, e:
+ except Exception as e:
if len(urls) == 1:
raise
- print "URL(%s) Failed: Exception (%s). Run URL individually for more detail."%(url,e)
+ print("URL(%s) Failed: Exception (%s). Run URL individually for more detail."%(url,e))
if options.save_cache:
with open('global_cache','wb') as jout:
@@ -297,9 +297,9 @@ def do_download(arg,
try:
url, chaptercount = get_dcsource_chaptercount(arg)
if not url:
- print 'No story URL found in epub to update.'
+ print('No story URL found in epub to update.')
return
- print 'Updating %s, URL: %s' % (arg, url)
+ print('Updating %s, URL: %s' % (arg, url))
output_filename = arg
except Exception:
# if there's an error reading the update file, maybe it's a URL?
@@ -343,7 +343,7 @@ def do_download(arg,
writer = writers.getWriter('epub', configuration, adapter)
output_filename = writer.getOutputFileName()
noturl, chaptercount = get_dcsource_chaptercount(output_filename)
- print 'Updating %s, URL: %s' % (output_filename, url)
+ print('Updating %s, URL: %s' % (output_filename, url))
except Exception:
options.update = False
pass
@@ -360,38 +360,38 @@ def do_download(arg,
try:
import Image
except ImportError:
- print "You have include_images enabled, but Python Image Library(PIL) isn't found.\nImages will be included full size in original format.\nContinue? (y/n)?"
+ print("You have include_images enabled, but Python Image Library(PIL) isn't found.\nImages will be included full size in original format.\nContinue? (y/n)?")
if options.interactive:
if not sys.stdin.readline().strip().lower().startswith('y'):
return
else:
# for non-interactive, default the response to yes and continue processing
- print 'y'
+ print('y')
# three tries, that's enough if both user/pass & is_adult needed,
# or a couple tries of one or the other
for x in range(0, 2):
try:
adapter.getStoryMetadataOnly()
- except exceptions.FailedToLogin, f:
+ except exceptions.FailedToLogin as f:
if not options.interactive:
- print 'Login Failed on non-interactive process. Set username and password in personal.ini.'
+ print('Login Failed on non-interactive process. Set username and password in personal.ini.')
return
if f.passwdonly:
- print 'Story requires a password.'
+ print('Story requires a password.')
else:
- print 'Login Failed, Need Username/Password.'
+ print('Login Failed, Need Username/Password.')
sys.stdout.write('Username: ')
adapter.username = sys.stdin.readline().strip()
adapter.password = getpass.getpass(prompt='Password: ')
# print('Login: `%s`, Password: `%s`' % (adapter.username, adapter.password))
except exceptions.AdultCheckRequired:
if options.interactive:
- print 'Please confirm you are an adult in your locale: (y/n)?'
+ print('Please confirm you are an adult in your locale: (y/n)?')
if sys.stdin.readline().strip().lower().startswith('y'):
adapter.is_adult = True
else:
- print 'Adult check required on non-interactive process. Set is_adult:true in personal.ini or pass -o "is_adult=true" to the command.'
+ print('Adult check required on non-interactive process. Set is_adult:true in personal.ini or pass -o "is_adult=true" to the command.')
return
if options.update and not options.force:
@@ -400,11 +400,11 @@ def do_download(arg,
urlchaptercount = adapter.getStoryMetadataOnly().getChapterCount()
if chaptercount == urlchaptercount and not options.metaonly:
- print '%s already contains %d chapters.' % (output_filename, chaptercount)
+ print('%s already contains %d chapters.' % (output_filename, chaptercount))
elif chaptercount > urlchaptercount:
- print '%s contains %d chapters, more than source: %d.' % (output_filename, chaptercount, urlchaptercount)
+ print('%s contains %d chapters, more than source: %d.' % (output_filename, chaptercount, urlchaptercount))
elif chaptercount == 0:
- print "%s doesn't contain any recognizable chapters, probably from a different source. Not updating." % output_filename
+ print("%s doesn't contain any recognizable chapters, probably from a different source. Not updating." % output_filename)
else:
# update now handled by pre-populating the old
# images and chapters in the adapter rather than
@@ -419,7 +419,7 @@ def do_download(arg,
adapter.oldchaptersmap,
adapter.oldchaptersdata) = (get_update_data(output_filename))[0:9]
- print 'Do update - epub(%d) vs url(%d)' % (chaptercount, urlchaptercount)
+ print('Do update - epub(%d) vs url(%d)' % (chaptercount, urlchaptercount))
if not options.update and chaptercount == urlchaptercount and adapter.getConfig('do_update_hook'):
adapter.hookForUpdates(chaptercount)
@@ -453,8 +453,8 @@ def do_download(arg,
metadata['output_filename'] = output_filename
if options.jsonmeta:
import json
- print json.dumps(metadata, sort_keys=True,
- indent=2, separators=(',', ':'))
+ print(json.dumps(metadata, sort_keys=True,
+ indent=2, separators=(',', ':')))
else:
pprint.pprint(metadata)
@@ -469,13 +469,13 @@ def do_download(arg,
del adapter
except exceptions.InvalidStoryURL as isu:
- print isu
+ print(isu)
except exceptions.StoryDoesNotExist as dne:
- print dne
+ print(dne)
except exceptions.UnknownSite as us:
- print us
+ print(us)
except exceptions.AccessDenied as ad:
- print ad
+ print(ad)
def get_configuration(url,
passed_defaultsini,
@@ -485,7 +485,7 @@ def get_configuration(url,
output_filename=None):
try:
configuration = Configuration(adapters.getConfigSectionsFor(url), options.format)
- except exceptions.UnknownSite, e:
+ except exceptions.UnknownSite as e:
if options.list or options.normalize or options.downloadlist:
# list for page doesn't have to be a supported site.
configuration = Configuration(['unknown'], options.format)
@@ -524,7 +524,7 @@ def get_configuration(url,
try:
configuration.add_section('overrides')
- except ConfigParser.DuplicateSectionError:
+ except configparser.DuplicateSectionError:
pass
if options.force:
diff --git a/fanficfare/cli3.py b/fanficfare/cli3.py
deleted file mode 100644
index 99b615f9..00000000
--- a/fanficfare/cli3.py
+++ /dev/null
@@ -1,554 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright 2015 Fanficdownloader team, 2018 FanFicFare team
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from optparse import OptionParser, SUPPRESS_HELP
-from os.path import expanduser, join, dirname
-from os import access, R_OK
-from subprocess import call
-from six import StringIO
-from six.moves import configparser
-import getpass
-import logging
-import pprint
-import string
-import sys
-
-import pickle
-import http.cookiejar as cl
-
-version="2.27.8"
-
-print("Python Version:%s"%sys.version)
-if sys.version_info < (3,0):
- print('this program requires python 3 or newer.')
- sys.exit(1)
-
-# if sys.version_info >= (2, 7):
-# # suppresses default logger. logging is setup in fanficfare/__init__.py so it works in calibre, too.
-# rootlogger = logging.getlogger()
-# loghandler = logging.nullhandler()
-# loghandler.setformatter(logging.formatter('(=====)(levelname)s:%(message)s'))
-# rootlogger.addhandler(loghandler)
-
-logger = logging.getLogger('fanficfare')
-
-try:
- # running under calibre
- from calibre_plugins.fanficfare_plugin.fanficfare import adapters, writers, exceptions
- from calibre_plugins.fanficfare_plugin.fanficfare.configurable import Configuration
- from calibre_plugins.fanficfare_plugin.fanficfare.epubutils import (
- get_dcsource_chaptercount, get_update_data, reset_orig_chapters_epub)
- from calibre_plugins.fanficfare_plugin.fanficfare.geturls import get_urls_from_page, get_urls_from_imap
-except ImportError:
- from fanficfare import adapters, writers, exceptions
- from fanficfare.configurable import Configuration
- from fanficfare.epubutils import (
- get_dcsource_chaptercount, get_update_data, reset_orig_chapters_epub)
- from fanficfare.geturls import get_urls_from_page, get_urls_from_imap
-
-
-def write_story(config, adapter, writeformat, metaonly=False, outstream=None):
- writer = writers.getWriter(writeformat, config, adapter)
- writer.writeStory(outstream=outstream, metaonly=metaonly)
- output_filename = writer.getOutputFileName()
- del writer
- return output_filename
-
-def main(argv=None,
- parser=None,
- passed_defaultsini=None,
- passed_personalini=None):
- if argv is None:
- argv = sys.argv[1:]
- # read in args, anything starting with -- will be treated as --=
- if not parser:
- parser = OptionParser('usage: %prog [options] [STORYURL]...')
- parser.add_option('-f', '--format', dest='format', default='epub',
- help='write story as FORMAT, epub(default), mobi, txt or html', metavar='FORMAT')
- if passed_defaultsini:
- config_help = 'read config from specified file(s) in addition to calibre plugin personal.ini, ~/.fanficfare/personal.ini, and ./personal.ini'
- else:
- config_help = 'read config from specified file(s) in addition to ~/.fanficfare/defaults.ini, ~/.fanficfare/personal.ini, ./defaults.ini, and ./personal.ini'
- parser.add_option('-c', '--config',
- action='append', dest='configfile', default=None,
- help=config_help, metavar='CONFIG')
- range_help = ' --begin and --end will be overridden by a chapter range on the STORYURL like STORYURL[1-2], STORYURL[-3], STORYURL[3-] or STORYURL[3]'
- parser.add_option('-b', '--begin', dest='begin', default=None,
- help='Begin with Chapter START.'+range_help, metavar='START')
- parser.add_option('-e', '--end', dest='end', default=None,
- help='End with Chapter END.'+range_help, metavar='END')
- parser.add_option('-o', '--option',
- action='append', dest='options',
- help='set an option NAME=VALUE', metavar='NAME=VALUE')
- parser.add_option('-m', '--meta-only',
- action='store_true', dest='metaonly',
- help='Retrieve metadata and stop. Or, if --update-epub, update metadata title page only.', )
- parser.add_option('--json-meta',
- action='store_true', dest='jsonmeta',
- help='When used with --meta-only, output metadata as JSON. No effect without --meta-only flag', )
- parser.add_option('-u', '--update-epub',
- action='store_true', dest='update',
- help='Update an existing epub(if present) with new chapters. Give either epub filename or story URL.', )
- parser.add_option('--update-cover',
- action='store_true', dest='updatecover',
- help='Update cover in an existing epub, otherwise existing cover (if any) is used on update. Only valid with --update-epub.', )
- parser.add_option('--unnew',
- action='store_true', dest='unnew',
- help='Remove (new) chapter marks left by mark_new_chapters setting.', )
- parser.add_option('--force',
- action='store_true', dest='force',
- help='Force overwrite of an existing epub, download and overwrite all chapters.', )
- parser.add_option('-i', '--infile',
- help='Give a filename to read for URLs (and/or existing EPUB files with --update-epub).',
- dest='infile', default=None,
- metavar='INFILE')
-
- parser.add_option('-l', '--list',
- dest='list', default=None, metavar='URL',
- help='Get list of valid story URLs from page given.', )
- parser.add_option('-n', '--normalize-list',
- dest='normalize', default=None, metavar='URL',
- help='Get list of valid story URLs from page given, but normalized to standard forms.', )
- parser.add_option('--download-list',
- dest='downloadlist', default=None, metavar='URL',
- help='Download story URLs retrieved from page given. Update existing EPUBs if used with --update-epub.', )
-
- parser.add_option('--imap',
- action='store_true', dest='imaplist',
- help='Get list of valid story URLs from unread email from IMAP account configured in ini.', )
-
- parser.add_option('--download-imap',
- action='store_true', dest='downloadimap',
- help='Download valid story URLs from unread email from IMAP account configured in ini. Update existing EPUBs if used with --update-epub.', )
-
- parser.add_option('-s', '--sites-list',
- action='store_true', dest='siteslist', default=False,
- help='Get list of valid story URLs examples.', )
- parser.add_option('--non-interactive',
- action='store_false', dest='interactive', default=sys.stdin.isatty() and sys.stdout.isatty(),
- help='Prevent interactive prompts (for scripting).', )
- parser.add_option('-d', '--debug',
- action='store_true', dest='debug',
- help='Show debug and notice output.', )
- parser.add_option('-p', '--progressbar',
- action='store_true', dest='progressbar',
- help='Display a simple progress bar while downloading--one dot(.) per network fetch.', )
- parser.add_option('-v', '--version',
- action='store_true', dest='version',
- help='Display version and quit.', )
-
- ## undocumented feature for development use. Save page cache and
- ## cookies between runs. Saves in PWD as files global_cache and
- ## global_cookies
- parser.add_option('--save-cache', '--save_cache',
- action='store_true', dest='save_cache',
- help=SUPPRESS_HELP, )
-
- options, args = parser.parse_args(argv)
-
- if options.version:
- print("Version: %s" % version)
- return
-
- if not options.debug:
- logger.setLevel(logging.WARNING)
-
- list_only = any((options.imaplist,
- options.siteslist,
- options.list,
- options.normalize,
- ))
-
- if list_only and (args or any((options.downloadimap,
- options.downloadlist))):
- parser.error('Incorrect arguments: Cannot download and list URLs at the same time.')
-
- if options.siteslist:
- for site, examples in adapters.getSiteExamples():
- print('\n#### %s\nExample URLs:' % site)
- for u in examples:
- print(' * %s' % u)
- return
-
- if options.update and options.format != 'epub':
- parser.error('-u/--update-epub only works with epub')
-
- if options.unnew and options.format != 'epub':
- parser.error('--unnew only works with epub')
-
- urls=args
-
- if not list_only and not (args or any((options.infile,
- options.downloadimap,
- options.downloadlist))):
- parser.print_help();
- return
-
- if options.list:
- configuration = get_configuration(options.list,
- passed_defaultsini,
- passed_personalini,options)
- retlist = get_urls_from_page(options.list, configuration)
- print('\n'.join(retlist))
-
- if options.normalize:
- configuration = get_configuration(options.normalize,
- passed_defaultsini,
- passed_personalini,options)
- retlist = get_urls_from_page(options.normalize, configuration,normalize=True)
- print('\n'.join(retlist))
-
- if options.downloadlist:
- configuration = get_configuration(options.downloadlist,
- passed_defaultsini,
- passed_personalini,options)
- retlist = get_urls_from_page(options.downloadlist, configuration)
- urls.extend(retlist)
-
- if options.imaplist or options.downloadimap:
- # list doesn't have a supported site.
- configuration = get_configuration('test1.com',passed_defaultsini,passed_personalini,options)
- markread = configuration.getConfig('imap_mark_read') == 'true' or \
- (configuration.getConfig('imap_mark_read') == 'downloadonly' and options.downloadimap)
- retlist = get_urls_from_imap(configuration.getConfig('imap_server'),
- configuration.getConfig('imap_username'),
- configuration.getConfig('imap_password'),
- configuration.getConfig('imap_folder'),
- markread)
-
- if options.downloadimap:
- urls.extend(retlist)
- else:
- print('\n'.join(retlist))
-
- # for passing in a file list
- if options.infile:
- with open(options.infile,"r") as infile:
- #print("file exists and is readable")
- for url in infile:
- if '#' in url:
- url = url[:url.find('#')].strip()
- url = url.strip()
- if len(url) > 0:
- #print("url: (%s)"%url)
- urls.append(url)
-
- if options.save_cache:
- try:
- with open('global_cache','rb') as jin:
- options.pagecache = pickle.load(jin) # ,encoding="utf-8"
- options.cookiejar = cl.LWPCookieJar()
- options.cookiejar.load('global_cookies')
- except:
- print("didn't load global_cache")
-
- if not list_only:
- if len(urls) < 1:
- print("No valid story URLs found")
- else:
- for url in urls:
- try:
- do_download(url,
- options,
- passed_defaultsini,
- passed_personalini)
- #print("pagecache:%s"%options.pagecache.keys())
- except Exception as e:
- if len(urls) == 1:
- raise
- print("URL(%s) Failed: Exception (%s). Run URL individually for more detail."%(url,e))
-
- if options.save_cache:
- with open('global_cache','wb') as jout:
- pickle.dump(options.pagecache,jout)
- options.cookiejar.save('global_cookies')
-
-# make rest a function and loop on it.
-def do_download(arg,
- options,
- passed_defaultsini,
- passed_personalini):
-
- # Attempt to update an existing epub.
- chaptercount = None
- output_filename = None
-
- if options.unnew:
- # remove mark_new_chapters marks
- reset_orig_chapters_epub(arg,arg)
- return
-
- if options.update:
- try:
- url, chaptercount = get_dcsource_chaptercount(arg)
- if not url:
- print('No story URL found in epub to update.')
- return
- print('Updating %s, URL: %s' % (arg, url))
- output_filename = arg
- except Exception:
- # if there's an error reading the update file, maybe it's a URL?
- # we'll look for an existing outputfile down below.
- url = arg
- else:
- url = arg
-
- configuration = get_configuration(url,
- passed_defaultsini,
- passed_personalini,
- options,
- chaptercount,
- output_filename)
-
- try:
- # Allow chapter range with URL.
- # like test1.com?sid=5[4-6] or [4,6]
- # Overrides CLI options if present.
- url,ch_begin,ch_end = adapters.get_url_chapter_range(url)
-
- adapter = adapters.getAdapter(configuration, url)
-
- ## Share pagecache and cookiejar between multiple downloads.
- if not hasattr(options,'pagecache'):
- options.pagecache = configuration.get_empty_pagecache()
- if not hasattr(options,'cookiejar'):
- options.cookiejar = configuration.get_empty_cookiejar()
- configuration.set_pagecache(options.pagecache)
- configuration.set_cookiejar(options.cookiejar)
-
- # url[begin-end] overrides CLI option if present.
- if ch_begin or ch_end:
- adapter.setChaptersRange(ch_begin, ch_end)
- else:
- adapter.setChaptersRange(options.begin, options.end)
-
- # check for updating from URL (vs from file)
- if options.update and not chaptercount:
- try:
- writer = writers.getWriter('epub', configuration, adapter)
- output_filename = writer.getOutputFileName()
- noturl, chaptercount = get_dcsource_chaptercount(output_filename)
- print('Updating %s, URL: %s' % (output_filename, url))
- except Exception:
- options.update = False
- pass
-
- # Check for include_images without no_image_processing. In absence of PIL, give warning.
- if adapter.getConfig('include_images') and not adapter.getConfig('no_image_processing'):
- try:
- from calibre.utils.magick import Image
- except ImportError:
- try:
- ## Pillow is a more current fork of PIL library
- from PIL import Image
- except ImportError:
- try:
- import Image
- except ImportError:
- print("You have include_images enabled, but Python Image Library(PIL) isn't found.\nImages will be included full size in original format.\nContinue? (y/n)?")
- if options.interactive:
- if not sys.stdin.readline().strip().lower().startswith('y'):
- return
- else:
- # for non-interactive, default the response to yes and continue processing
- print('y')
-
- # three tries, that's enough if both user/pass & is_adult needed,
- # or a couple tries of one or the other
- for x in range(0, 2):
- try:
- adapter.getStoryMetadataOnly()
- except exceptions.FailedToLogin as f:
- if not options.interactive:
- print('Login Failed on non-interactive process. Set username and password in personal.ini.')
- return
- if f.passwdonly:
- print('Story requires a password.')
- else:
- print('Login Failed, Need Username/Password.')
- sys.stdout.write('Username: ')
- adapter.username = sys.stdin.readline().strip()
- adapter.password = getpass.getpass(prompt='Password: ')
- # print('Login: `%s`, Password: `%s`' % (adapter.username, adapter.password))
- except exceptions.AdultCheckRequired:
- if options.interactive:
- print('Please confirm you are an adult in your locale: (y/n)?')
- if sys.stdin.readline().strip().lower().startswith('y'):
- adapter.is_adult = True
- else:
- print('Adult check required on non-interactive process. Set is_adult:true in personal.ini or pass -o "is_adult=true" to the command.')
- return
-
- if options.update and not options.force:
- urlchaptercount = int(adapter.getStoryMetadataOnly().getMetadata('numChapters').replace(',',''))
- # returns int adjusted for start-end range.
- urlchaptercount = adapter.getStoryMetadataOnly().getChapterCount()
-
- if chaptercount == urlchaptercount and not options.metaonly:
- print('%s already contains %d chapters.' % (output_filename, chaptercount))
- elif chaptercount > urlchaptercount:
- print('%s contains %d chapters, more than source: %d.' % (output_filename, chaptercount, urlchaptercount))
- elif chaptercount == 0:
- print("%s doesn't contain any recognizable chapters, probably from a different source. Not updating." % output_filename)
- else:
- # update now handled by pre-populating the old
- # images and chapters in the adapter rather than
- # merging epubs.
- (url,
- chaptercount,
- adapter.oldchapters,
- adapter.oldimgs,
- adapter.oldcover,
- adapter.calibrebookmark,
- adapter.logfile,
- adapter.oldchaptersmap,
- adapter.oldchaptersdata) = (get_update_data(output_filename))[0:9]
-
- print('Do update - epub(%d) vs url(%d)' % (chaptercount, urlchaptercount))
-
- if not options.update and chaptercount == urlchaptercount and adapter.getConfig('do_update_hook'):
- adapter.hookForUpdates(chaptercount)
-
- if adapter.getConfig('pre_process_safepattern'):
- metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getConfig('pre_process_safepattern'))
- else:
- metadata = adapter.story.getAllMetadata()
- call(string.Template(adapter.getConfig('pre_process_cmd')).substitute(metadata), shell=True)
-
- write_story(configuration, adapter, 'epub')
-
- else:
- # regular download
- if options.metaonly:
- metadata = adapter.getStoryMetadataOnly().getAllMetadata()
- metadata['zchapters'] = []
- for i, chap in enumerate(adapter.get_chapters()):
- metadata['zchapters'].append((i+1,chap))
-
- if not options.metaonly and adapter.getConfig('pre_process_cmd'):
- if adapter.getConfig('pre_process_safepattern'):
- metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getConfig('pre_process_safepattern'))
- else:
- metadata = adapter.story.getAllMetadata()
- call(string.Template(adapter.getConfig('pre_process_cmd')).substitute(metadata), shell=True)
-
- output_filename = write_story(configuration, adapter, options.format, options.metaonly)
-
- if options.metaonly:
- metadata['output_filename'] = output_filename
- if options.jsonmeta:
- import json
- print(json.dumps(metadata, sort_keys=True,
- indent=2, separators=(',', ':')))
- else:
- pprint.pprint(metadata)
-
- if not options.metaonly and adapter.getConfig('post_process_cmd'):
- if adapter.getConfig('post_process_safepattern'):
- metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getConfig('post_process_safepattern'))
- else:
- metadata = adapter.story.getAllMetadata()
- metadata['output_filename'] = output_filename
- call(string.Template(adapter.getConfig('post_process_cmd')).substitute(metadata), shell=True)
-
- del adapter
-
- except exceptions.InvalidStoryURL as isu:
- print(isu)
- except exceptions.StoryDoesNotExist as dne:
- print(dne)
- except exceptions.UnknownSite as us:
- print(us)
- except exceptions.AccessDenied as ad:
- print(ad)
-
-def get_configuration(url,
- passed_defaultsini,
- passed_personalini,
- options,
- chaptercount=None,
- output_filename=None):
- try:
- configuration = Configuration(adapters.getConfigSectionsFor(url), options.format)
- except exceptions.UnknownSite as e:
- if options.list or options.normalize or options.downloadlist:
- # list for page doesn't have to be a supported site.
- configuration = Configuration(['unknown'], options.format)
- else:
- raise e
-
- conflist = []
- homepath = join(expanduser('~'), '.fanficdownloader')
- ## also look for .fanficfare now, give higher priority than old dir.
- homepath2 = join(expanduser('~'), '.fanficfare')
-
- if passed_defaultsini:
- # new StringIO each time rather than pass StringIO and rewind
- # for case of list download. Just makes more sense to me.
- configuration.readfp(StringIO(passed_defaultsini))
- else:
- # don't need to check existance for our selves.
- conflist.append(join(dirname(__file__), 'defaults.ini'))
- conflist.append(join(homepath, 'defaults.ini'))
- conflist.append(join(homepath2, 'defaults.ini'))
- conflist.append('defaults.ini')
-
- if passed_personalini:
- # new StringIO each time rather than pass StringIO and rewind
- # for case of list download. Just makes more sense to me.
- configuration.readfp(StringIO(passed_personalini))
-
- conflist.append(join(homepath, 'personal.ini'))
- conflist.append(join(homepath2, 'personal.ini'))
- conflist.append('personal.ini')
-
- if options.configfile:
- conflist.extend(options.configfile)
-
- configuration.read(conflist)
-
- try:
- configuration.add_section('overrides')
- except configparser.DuplicateSectionError:
- pass
-
- if options.force:
- configuration.set('overrides', 'always_overwrite', 'true')
-
- if options.update and chaptercount and output_filename:
- configuration.set('overrides', 'output_filename', output_filename)
-
- if options.update and not options.updatecover:
- configuration.set('overrides', 'never_make_cover', 'true')
-
- # images only for epub, even if the user mistakenly turned it
- # on else where.
- if options.format not in ('epub', 'html'):
- configuration.set('overrides', 'include_images', 'false')
-
- if options.options:
- for opt in options.options:
- (var, val) = opt.split('=')
- configuration.set('overrides', var, val)
-
- if options.progressbar:
- configuration.set('overrides','progressbar','true')
-
- return configuration
-
-if __name__ == '__main__':
- main()
From 0783a74b59325038b45ab2b1dc86621315ef92f9 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Thu, 26 Jul 2018 17:41:54 -0500
Subject: [PATCH 010/120] test1.com with epub/txt/html output working, mobi
broken.
---
fanficfare/writers/writer_epub.py | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/fanficfare/writers/writer_epub.py b/fanficfare/writers/writer_epub.py
index 93bad44f..180b883a 100644
--- a/fanficfare/writers/writer_epub.py
+++ b/fanficfare/writers/writer_epub.py
@@ -581,9 +581,10 @@ div { margin: 0pt; padding: 0pt; }
# write content.opf to zip.
contentxml = contentdom.toxml(encoding='utf-8')
+ # Causes py2 vs py3 issues with encoding nonsense. Skip for now.
# tweak for brain damaged Nook STR. Nook insists on name before content.
- contentxml = unicode(contentxml).replace(''%coverimgid,
- ''%coverimgid)
+ # contentxml = contentxml.replace(''%coverimgid,
+ # ''%coverimgid)
outputepub.writestr("content.opf",contentxml)
From 2d2805f1b85236c69c77698a2473b79eed6360f0 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Sat, 28 Jul 2018 23:06:35 -0500
Subject: [PATCH 011/120] Because of course py3 uses an incompatible pickle
format by default.
---
fanficfare/cli.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/fanficfare/cli.py b/fanficfare/cli.py
index 7e79f9db..5b192b0d 100644
--- a/fanficfare/cli.py
+++ b/fanficfare/cli.py
@@ -275,7 +275,7 @@ def main(argv=None,
if options.save_cache:
with open('global_cache','wb') as jout:
- pickle.dump(options.pagecache,jout)
+ pickle.dump(options.pagecache,jout,protocol=2)
options.cookiejar.save('global_cookies')
# make rest a function and loop on it.
From 615b2f54b43811bb4ecae6c0fa30e131b6c1b331 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Mon, 30 Jul 2018 10:04:07 -0500
Subject: [PATCH 012/120] Add internal python_version metadata.
---
fanficfare/story.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/fanficfare/story.py b/fanficfare/story.py
index 294dd228..ef3262c0 100644
--- a/fanficfare/story.py
+++ b/fanficfare/story.py
@@ -16,7 +16,7 @@
#
from __future__ import absolute_import
-import os, re
+import os, re, sys
import copy
from collections import defaultdict
from six.moves.urllib.parse import urlparse
@@ -441,6 +441,7 @@ class Story(Configurable):
self.metadata = {'version':os.environ['CURRENT_VERSION_ID']}
except:
self.metadata = {'version':'unknown'}
+ self.metadata['python_version']=sys.version
self.replacements = []
self.in_ex_cludes = {}
self.chapters = [] # chapters will be dict containing(url,title,html,etc)
From ad1ce3bbb010f913a438c2159a58bb788b3ed376 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Mon, 30 Jul 2018 10:04:38 -0500
Subject: [PATCH 013/120] ffnet 2.7/3.7 with save-cache working.
---
fanficfare/adapters/__init__.py | 2 +-
fanficfare/adapters/adapter_fanfictionnet.py | 16 +++++++++-----
fanficfare/cli.py | 23 ++++++++++++++------
fanficfare/configurable.py | 16 +++++++++++---
fanficfare/gziphttp.py | 9 ++++----
5 files changed, 45 insertions(+), 21 deletions(-)
diff --git a/fanficfare/adapters/__init__.py b/fanficfare/adapters/__init__.py
index 80a29cd5..249a01d5 100644
--- a/fanficfare/adapters/__init__.py
+++ b/fanficfare/adapters/__init__.py
@@ -28,7 +28,7 @@ from .. import exceptions as exceptions
## must import each adapter here.
from . import adapter_test1
-# import adapter_fanfictionnet
+from . import adapter_fanfictionnet
# import adapter_fanficcastletvnet
# import adapter_fictionalleyorg
# import adapter_fictionpresscom
diff --git a/fanficfare/adapters/adapter_fanfictionnet.py b/fanficfare/adapters/adapter_fanfictionnet.py
index 9140ff6f..79633699 100644
--- a/fanficfare/adapters/adapter_fanfictionnet.py
+++ b/fanficfare/adapters/adapter_fanfictionnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,17 +15,21 @@
# limitations under the License.
#
+from __future__ import absolute_import
from datetime import datetime
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-from urllib import unquote_plus
+
+# py2 vs py3 transition
+from six import text_type as unicode
+from six.moves.urllib.error import HTTPError
+
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
ffnetgenres=["Adventure", "Angst", "Crime", "Drama", "Family", "Fantasy", "Friendship", "General",
"Horror", "Humor", "Hurt-Comfort", "Mystery", "Parody", "Poetry", "Romance", "Sci-Fi",
@@ -100,7 +104,7 @@ class FanFictionNetSiteAdapter(BaseSiteAdapter):
data = self._fetchUrl(url)
#logger.debug("\n===================\n%s\n===================\n"%data)
soup = self.make_soup(data)
- except urllib2.HTTPError as e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(url)
else:
@@ -135,7 +139,7 @@ class FanFictionNetSiteAdapter(BaseSiteAdapter):
and "This request takes too long to process, it is timed out by the server." not in newdata:
logger.debug('=======Found newer chapter: %s' % tryurl)
soup = self.make_soup(newdata)
- except urllib2.HTTPError as e:
+ except HTTPError as e:
if e.code == 503:
raise e
except Exception as e:
diff --git a/fanficfare/cli.py b/fanficfare/cli.py
index 5b192b0d..cc0c3c9f 100644
--- a/fanficfare/cli.py
+++ b/fanficfare/cli.py
@@ -26,16 +26,25 @@ import logging
import pprint
import string
import os, sys
-
import pickle
+
+if sys.version_info < (2, 5):
+ print('This program requires Python 2.5 or newer.')
+ sys.exit(1)
+elif sys.version_info < (3, 0):
+ reload(sys) # Reload restores 'hidden' setdefaultencoding method
+ sys.setdefaultencoding("utf-8")
+ def pickle_load(f):
+ return pickle.load(f)
+else: # > 3.0
+ def pickle_load(f):
+ return pickle.load(f,encoding="bytes")
+
from six.moves import http_cookiejar as cl
version="2.27.12"
os.environ['CURRENT_VERSION_ID']=version
-if sys.version_info < (2, 5):
- print('This program requires Python 2.5 or newer.')
- sys.exit(1)
if sys.version_info >= (2, 7):
# suppresses default logger. Logging is setup in fanficfare/__init__.py so it works in calibre, too.
@@ -251,11 +260,11 @@ def main(argv=None,
if options.save_cache:
try:
with open('global_cache','rb') as jin:
- options.pagecache = pickle.load(jin) # ,encoding="utf-8"
+ options.pagecache = pickle_load(jin)
options.cookiejar = cl.LWPCookieJar()
options.cookiejar.load('global_cookies')
- except:
- print("didn't load global_cache")
+ except Exception as e:
+ print("didn't load global_cache %s"%e)
if not list_only:
if len(urls) < 1:
diff --git a/fanficfare/configurable.py b/fanficfare/configurable.py
index 4e061fd6..7d4d43c8 100644
--- a/fanficfare/configurable.py
+++ b/fanficfare/configurable.py
@@ -19,13 +19,14 @@ import re
import exceptions
import codecs
+# py2 vs py3 transition
import six
import six.moves.configparser as ConfigParser
from six.moves.configparser import DEFAULTSECT, MissingSectionHeaderError, ParsingError
from six.moves import urllib
from six.moves.urllib.request import (build_opener, HTTPCookieProcessor)
+from six.moves.urllib.error import HTTPError
from six.moves import http_cookiejar as cl
-# py2 vs py3 transition
from six import text_type as unicode
from six import string_types as basestring
@@ -950,6 +951,10 @@ class Configuration(ConfigParser.SafeConfigParser):
## iso-8859-1. Most sites that claim to be iso-8859-1 (and some that
## claim to be utf8) are really windows-1252.
def _decode(self,data):
+ if not hasattr(data,'decode'):
+ ## py3 str() from pickle doesn't have .decode and is
+ ## already decoded.
+ return data
decode = self.getConfigList('website_encodings',
default=["utf8",
"Windows-1252",
@@ -976,8 +981,9 @@ class Configuration(ConfigParser.SafeConfigParser):
return data.decode(code,errors='ignore')
else:
return data.decode(code)
- except:
+ except Exception as e:
logger.debug("code failed:"+code)
+ logger.debug(e)
pass
logger.info("Could not decode story, tried:%s Stripping non-ASCII."%decode)
return "".join([x for x in data if ord(x) < 128])
@@ -1027,6 +1033,8 @@ class Configuration(ConfigParser.SafeConfigParser):
data = self._decode(self.opener.open(req,None,float(self.getConfig('connect_timeout',30.0))).read())
self._progressbar()
+ ## postURL saves data to the pagecache *after* _decode() while
+ ## fetchRaw saves it *before* _decode()--because raw.
self._set_to_pagecache(cachekey,data,url)
return data
@@ -1093,6 +1101,8 @@ class Configuration(ConfigParser.SafeConfigParser):
float(self.getConfig('connect_timeout',30.0)))
self._progressbar()
data = opened.read()
+ ## postURL saves data to the pagecache *after* _decode() while
+ ## fetchRaw saves it *before* _decode()--because raw.
self._set_to_pagecache(cachekey,data,opened.url)
return (data,opened)
@@ -1131,7 +1141,7 @@ class Configuration(ConfigParser.SafeConfigParser):
extrasleep=extrasleep,
referer=referer)
return (self._decode(data),opened)
- except urllib.HTTPError as he:
+ except HTTPError as he:
excpt=he
if he.code in (403,404,410):
logger.debug("Caught an exception reading URL: %s Exception %s."%(unicode(safe_url(url)),unicode(he)))
diff --git a/fanficfare/gziphttp.py b/fanficfare/gziphttp.py
index 45974b42..ddc2ef44 100644
--- a/fanficfare/gziphttp.py
+++ b/fanficfare/gziphttp.py
@@ -1,8 +1,9 @@
## Borrowed from http://techknack.net/python-urllib2-handlers/
-from six.moves.urllib_request import BaseHandler
+from six.moves.urllib.request import BaseHandler
+from six.moves.urllib.response import addinfourl
from gzip import GzipFile
-from six import StringIO
+from six import BytesIO
class GZipProcessor(BaseHandler):
"""A handler to add gzip capabilities to urllib2 requests
@@ -16,7 +17,7 @@ class GZipProcessor(BaseHandler):
#print("Content-Encoding:%s"%resp.headers.get("Content-Encoding"))
if resp.headers.get("Content-Encoding") == "gzip":
gz = GzipFile(
- fileobj=StringIO(resp.read()),
+ fileobj=BytesIO(resp.read()),
mode="r"
)
# resp.read = gz.read
@@ -24,7 +25,7 @@ class GZipProcessor(BaseHandler):
# resp.readline = gz.readline
# resp.next = gz.next
old_resp = resp
- resp = urllib2.addinfourl(gz, old_resp.headers, old_resp.url, old_resp.code)
+ resp = addinfourl(gz, old_resp.headers, old_resp.url, old_resp.code)
resp.msg = old_resp.msg
return resp
https_response = http_response
From 58402ea6e5af25f6bde2ab771fd3772b165b8326 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Mon, 30 Jul 2018 12:34:19 -0500
Subject: [PATCH 014/120] Fix p2/p3 unichr
---
fanficfare/htmlcleanup.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/fanficfare/htmlcleanup.py b/fanficfare/htmlcleanup.py
index 910cc6ce..6ae709df 100644
--- a/fanficfare/htmlcleanup.py
+++ b/fanficfare/htmlcleanup.py
@@ -24,6 +24,7 @@ import re
# py2 vs py3 transition
from six import text_type as unicode
from six import string_types as basestring
+from six import unichr
def _unirepl(match):
"Return the unicode string for a decimal number"
From a5f67705894aa202f2301d1c8d1f0f05dadda29e Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Mon, 30 Jul 2018 12:37:11 -0500
Subject: [PATCH 015/120] Little cleanup & name normalize.
---
fanficfare/cli.py | 4 +++-
fanficfare/configurable.py | 10 +++++-----
fanficfare/htmlcleanup.py | 4 ++--
3 files changed, 10 insertions(+), 8 deletions(-)
diff --git a/fanficfare/cli.py b/fanficfare/cli.py
index cc0c3c9f..0cc97d33 100644
--- a/fanficfare/cli.py
+++ b/fanficfare/cli.py
@@ -15,6 +15,8 @@
# limitations under the License.
#
+from __future__ import absolute_import
+from __future__ import print_function
from optparse import OptionParser, SUPPRESS_HELP
from os.path import expanduser, join, dirname
from os import access, R_OK
@@ -264,7 +266,7 @@ def main(argv=None,
options.cookiejar = cl.LWPCookieJar()
options.cookiejar.load('global_cookies')
except Exception as e:
- print("didn't load global_cache %s"%e)
+ print("Didn't load global_cache %s"%e)
if not list_only:
if len(urls) < 1:
diff --git a/fanficfare/configurable.py b/fanficfare/configurable.py
index 7d4d43c8..e89835b0 100644
--- a/fanficfare/configurable.py
+++ b/fanficfare/configurable.py
@@ -21,7 +21,7 @@ import codecs
# py2 vs py3 transition
import six
-import six.moves.configparser as ConfigParser
+from six.moves import configparser
from six.moves.configparser import DEFAULTSECT, MissingSectionHeaderError, ParsingError
from six.moves import urllib
from six.moves.urllib.request import (build_opener, HTTPCookieProcessor)
@@ -495,11 +495,11 @@ def make_generate_cover_settings(param):
return vlist
-class Configuration(ConfigParser.SafeConfigParser):
+class Configuration(configparser.SafeConfigParser):
def __init__(self, sections, fileform, lightweight=False):
site = sections[-1] # first section is site DN.
- ConfigParser.SafeConfigParser.__init__(self)
+ configparser.SafeConfigParser.__init__(self)
self.lightweight = lightweight
self.use_pagecache = False # default to false for old adapters.
@@ -650,7 +650,7 @@ class Configuration(ConfigParser.SafeConfigParser):
val = False
#print("getConfig(%s)=[%s]%s" % (key,section,val))
break
- except (ConfigParser.NoOptionError, ConfigParser.NoSectionError) as e:
+ except (configparser.NoOptionError, configparser.NoSectionError) as e:
pass
for section in sections[::-1]:
@@ -658,7 +658,7 @@ class Configuration(ConfigParser.SafeConfigParser):
try:
val = val + self.get(section,"add_to_"+key)
#print("getConfig(add_to_%s)=[%s]%s" % (key,section,val))
- except (ConfigParser.NoOptionError, ConfigParser.NoSectionError) as e:
+ except (configparser.NoOptionError, configparser.NoSectionError) as e:
pass
return val
diff --git a/fanficfare/htmlcleanup.py b/fanficfare/htmlcleanup.py
index 6ae709df..3d1bc41b 100644
--- a/fanficfare/htmlcleanup.py
+++ b/fanficfare/htmlcleanup.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -86,7 +86,7 @@ def removeEntities(text, space_only=False):
return unicode(text)
try:
- t = text #.decode('utf-8')
+ t = text
except (UnicodeEncodeError,UnicodeDecodeError) as e:
try:
t = text.encode ('ascii', 'xmlcharrefreplace')
From 7e9c337fb062f88cad8b7344e825f0682d354ca4 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Mon, 30 Jul 2018 13:27:31 -0500
Subject: [PATCH 016/120] Fixes for including/manipulating images.
---
fanficfare/configurable.py | 6 ++---
fanficfare/story.py | 37 ++++++++++++++++++-------------
fanficfare/writers/writer_epub.py | 6 ++---
3 files changed, 27 insertions(+), 22 deletions(-)
diff --git a/fanficfare/configurable.py b/fanficfare/configurable.py
index e89835b0..0770617a 100644
--- a/fanficfare/configurable.py
+++ b/fanficfare/configurable.py
@@ -938,9 +938,9 @@ class Configuration(configparser.SafeConfigParser):
def _set_to_pagecache(self,cachekey,data,redirectedurl):
if self.use_pagecache:
self.get_pagecache()[cachekey] = (data,redirectedurl)
- # with open('global_cache','wb') as jout:
- # pickle.dump(self.pagecache,jout)
- # self.cookiejar.save('global_cookies')
+ # with open('global_cache','wb') as jout:
+ # pickle.dump(self.pagecache,jout,protocol=2)
+ # self.cookiejar.save('global_cookies')
## website encoding(s)--in theory, each website reports the character
## encoding they use for each page. In practice, some sites report it
diff --git a/fanficfare/story.py b/fanficfare/story.py
index ef3262c0..6de6d699 100644
--- a/fanficfare/story.py
+++ b/fanficfare/story.py
@@ -19,7 +19,6 @@ from __future__ import absolute_import
import os, re, sys
import copy
from collections import defaultdict
-from six.moves.urllib.parse import urlparse
import string
import json
import datetime
@@ -27,8 +26,10 @@ from math import floor
from functools import partial
import logging
logger = logging.getLogger(__name__)
+
# py2 vs py3 transition
import six
+from six.moves.urllib.parse import (urlparse, urlunparse)
from six import text_type as unicode
from six import string_types as basestring
from six.moves import map
@@ -56,12 +57,13 @@ imagetypes = {
try:
from calibre.utils.magick import Image
- from six import StringIO
+ from six import BytesIO
from gif import GifInfo, CHECK_IS_ANIMATED
convtype = {'jpg':'JPG', 'png':'PNG'}
def convert_image(url,data,sizes,grayscale,
removetrans,imgtype="jpg",background='#ffffff'):
+ logger.debug("calibre convert_image called")
if url.lower().endswith('.svg'):
raise exceptions.RejectImage("Calibre image processing chokes on SVG images.")
@@ -73,7 +75,7 @@ try:
nwidth, nheight = sizes
scaled, nwidth, nheight = fit_image(owidth, oheight, nwidth, nheight)
- if normalize_format_name(img.format)=="gif" and GifInfo(StringIO(data),CHECK_IS_ANIMATED).frameCount > 1:
+ if normalize_format_name(img.format)=="gif" and GifInfo(BytesIO(data),CHECK_IS_ANIMATED).frameCount > 1:
raise exceptions.RejectImage("Animated gifs come out poorly--not going to use it.")
if scaled:
@@ -102,15 +104,16 @@ try:
except:
- # No calibre routines, try for PIL for CLI.
+ # No calibre routines, try for Pillow for CLI.
try:
- import Image
- from six import StringIO
+ from PIL import Image
+ from six import BytesIO
convtype = {'jpg':'JPEG', 'png':'PNG'}
def convert_image(url,data,sizes,grayscale,
removetrans,imgtype="jpg",background='#ffffff'):
+ logger.debug("Pillow convert_image called")
export = False
- img = Image.open(StringIO(data))
+ img = Image.open(BytesIO(data))
owidth, oheight = img.size
nwidth, nheight = sizes
@@ -137,7 +140,7 @@ except:
export = True
if export:
- outsio = StringIO()
+ outsio = BytesIO()
img.save(outsio,convtype[imgtype])
return (outsio.getvalue(),imgtype,imagetypes[imgtype])
else:
@@ -145,14 +148,16 @@ except:
return (data,imgtype,imagetypes[imgtype])
except:
+ raise
# No calibre or PIL, simple pass through with mimetype.
def convert_image(url,data,sizes,grayscale,
removetrans,imgtype="jpg",background='#ffffff'):
+ logger.debug("NO convert_image called")
return no_convert_image(url,data)
## also used for explicit no image processing.
def no_convert_image(url,data):
- parsedUrl = urlparse.urlparse(url)
+ parsedUrl = urlparse(url)
ext=parsedUrl.path[parsedUrl.path.rfind('.')+1:].lower()
@@ -1119,7 +1124,7 @@ class Story(Configurable):
else:
values = self.get_filename_safe_metadata()
- return string.Template(template).substitute(values).encode('utf8')
+ return string.Template(template).substitute(values) #.encode('utf8')
# pass fetch in from adapter in case we need the cookies collected
# as well as it's a base_story class method.
@@ -1145,15 +1150,15 @@ class Story(Configurable):
if url.startswith("http") or url.startswith("file") or parenturl == None:
imgurl = url
else:
- parsedUrl = urlparse.urlparse(parenturl)
+ parsedUrl = urlparse(parenturl)
if url.startswith("//") :
- imgurl = urlparse.urlunparse(
+ imgurl = urlunparse(
(parsedUrl.scheme,
'',
url,
'','',''))
elif url.startswith("/") :
- imgurl = urlparse.urlunparse(
+ imgurl = urlunparse(
(parsedUrl.scheme,
parsedUrl.netloc,
url,
@@ -1164,7 +1169,7 @@ class Story(Configurable):
toppath = parsedUrl.path
else:
toppath = parsedUrl.path[:parsedUrl.path.rindex('/')+1]
- imgurl = urlparse.urlunparse(
+ imgurl = urlunparse(
(parsedUrl.scheme,
parsedUrl.netloc,
toppath + url,
@@ -1183,7 +1188,7 @@ class Story(Configurable):
if imgurl.endswith('failedtoload'):
return ("failedtoload","failedtoload")
- parsedUrl = urlparse.urlparse(imgurl)
+ parsedUrl = urlparse(imgurl)
if self.getConfig('no_image_processing'):
(data,ext,mime) = no_convert_image(imgurl,
fetch(imgurl,referer=parenturl))
@@ -1256,7 +1261,7 @@ class Story(Configurable):
def getImgUrls(self):
retlist = []
for i, url in enumerate(self.imgurls):
- #parsedUrl = urlparse.urlparse(url)
+ #parsedUrl = urlparse(url)
retlist.append(self.imgtuples[i])
return retlist
diff --git a/fanficfare/writers/writer_epub.py b/fanficfare/writers/writer_epub.py
index 180b883a..e94f27eb 100644
--- a/fanficfare/writers/writer_epub.py
+++ b/fanficfare/writers/writer_epub.py
@@ -524,7 +524,7 @@ div { margin: 0pt; padding: 0pt; }
else:
COVER = self.EPUB_COVER
coverIO = BytesIO()
- coverIO.write(COVER.substitute(dict(list(self.story.getAllMetadata().items())+list({'coverimg':self.story.cover}.items()))))
+ self._write(coverIO,COVER.substitute(dict(list(self.story.getAllMetadata().items())+list({'coverimg':self.story.cover}.items()))))
if self.getConfig("include_titlepage"):
items.append(("title_page","OEBPS/title_page.xhtml","application/xhtml+xml","Title Page"))
@@ -579,14 +579,14 @@ div { margin: 0pt; padding: 0pt; }
package.appendChild(guide)
# write content.opf to zip.
- contentxml = contentdom.toxml(encoding='utf-8')
+ contentxml = contentdom.toxml() # encoding='utf-8'
# Causes py2 vs py3 issues with encoding nonsense. Skip for now.
# tweak for brain damaged Nook STR. Nook insists on name before content.
# contentxml = contentxml.replace(''%coverimgid,
# ''%coverimgid)
-
+
outputepub.writestr("content.opf",contentxml)
contentdom.unlink()
From c80fe7472976a9442c82e8a62bb8f10c1cac3bdc Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Mon, 30 Jul 2018 13:41:46 -0500
Subject: [PATCH 017/120] Tweaking writers
---
fanficfare/story.py | 1 -
fanficfare/writers/writer_html.py | 9 ++++++---
fanficfare/writers/writer_mobi.py | 3 ---
fanficfare/writers/writer_txt.py | 3 ++-
4 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/fanficfare/story.py b/fanficfare/story.py
index 6de6d699..6fa49fb9 100644
--- a/fanficfare/story.py
+++ b/fanficfare/story.py
@@ -148,7 +148,6 @@ except:
return (data,imgtype,imagetypes[imgtype])
except:
- raise
# No calibre or PIL, simple pass through with mimetype.
def convert_image(url,data,sizes,grayscale,
removetrans,imgtype="jpg",background='#ffffff'):
diff --git a/fanficfare/writers/writer_html.py b/fanficfare/writers/writer_html.py
index 09664b82..39c2d78d 100644
--- a/fanficfare/writers/writer_html.py
+++ b/fanficfare/writers/writer_html.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2016 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,13 +15,16 @@
# limitations under the License.
#
+from __future__ import absolute_import
import logging
import string
+# py2 vs py3 transition
+from six import text_type as unicode
+
import bs4
from .base_writer import *
-
class HTMLWriter(BaseStoryWriter):
@staticmethod
@@ -107,7 +110,7 @@ ${output_css}
self._write(out,FILE_START.substitute(self.story.getAllMetadata()))
if self.getConfig('include_images') and self.story.cover:
- self._write(out,COVER.substitute(dict(self.story.getAllMetadata().items()+{'coverimg':self.story.cover}.items())))
+ self._write(out,COVER.substitute(dict(list(self.story.getAllMetadata().items())+list({'coverimg':self.story.cover}.items()))))
self.writeTitlePage(out,
self.HTML_TITLE_PAGE_START,
diff --git a/fanficfare/writers/writer_mobi.py b/fanficfare/writers/writer_mobi.py
index ec7a2599..3806f077 100644
--- a/fanficfare/writers/writer_mobi.py
+++ b/fanficfare/writers/writer_mobi.py
@@ -25,9 +25,6 @@ from ..mobi import Converter
from ..exceptions import FailedToWriteOutput
# py2 vs py3 transition
-from six import text_type as unicode
-from six import string_types as basestring
-from six import binary_type as bytes
from six import BytesIO # StringIO under py2
logger = logging.getLogger(__name__)
diff --git a/fanficfare/writers/writer_txt.py b/fanficfare/writers/writer_txt.py
index 7199311e..9f3c9980 100644
--- a/fanficfare/writers/writer_txt.py
+++ b/fanficfare/writers/writer_txt.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import logging
import string
from textwrap import wrap
From 5a88e7fcf4543aba540472bb02afd6b5cd993ee0 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Mon, 30 Jul 2018 13:59:51 -0500
Subject: [PATCH 018/120] py2/py3 compatible version of htmlheuristics
---
fanficfare/htmlheuristics.py | 11 ++++++++---
1 file changed, 8 insertions(+), 3 deletions(-)
diff --git a/fanficfare/htmlheuristics.py b/fanficfare/htmlheuristics.py
index 7b520bd1..a14e918a 100644
--- a/fanficfare/htmlheuristics.py
+++ b/fanficfare/htmlheuristics.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,14 +15,19 @@
# limitations under the License.
#
+from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
import codecs
import bs4 as bs
-import HtmlTagStack as stack
-import exceptions
+# py2 vs py3 transition
+from six import text_type as unicode
+from six.moves import range
+
+from . import HtmlTagStack as stack
+from . import exceptions as exceptions
def logdebug(s):
# uncomment for debug output
From f2db0cbc01431419a42b6d3e7e66d2991314328e Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Mon, 30 Jul 2018 14:00:12 -0500
Subject: [PATCH 019/120] py2/py3 compatible version of xenforoforum & SpaceBattles adapters
---
fanficfare/adapters/__init__.py | 296 +++++++++---------
.../adapters/adapter_forumsspacebattlescom.py | 5 +-
.../adapters/base_xenforoforum_adapter.py | 16 +-
3 files changed, 161 insertions(+), 156 deletions(-)
diff --git a/fanficfare/adapters/__init__.py b/fanficfare/adapters/__init__.py
index 249a01d5..ceacd2e0 100644
--- a/fanficfare/adapters/__init__.py
+++ b/fanficfare/adapters/__init__.py
@@ -29,154 +29,154 @@ from .. import exceptions as exceptions
from . import adapter_test1
from . import adapter_fanfictionnet
-# import adapter_fanficcastletvnet
-# import adapter_fictionalleyorg
-# import adapter_fictionpresscom
-# import adapter_ficwadcom
-# import adapter_fimfictionnet
-# import adapter_mediaminerorg
-# import adapter_potionsandsnitches
-# import adapter_tenhawkpresentscom
-# import adapter_adastrafanficcom
-# import adapter_tthfanficorg
-# import adapter_twilightednet
-# import adapter_whoficcom
-# import adapter_siyecouk
-# import adapter_archiveofourownorg
-# import adapter_ficbooknet
-# import adapter_nfacommunitycom
-# import adapter_midnightwhispers
-# import adapter_ksarchivecom
-# import adapter_archiveskyehawkecom
-# import adapter_squidgeorgpeja
-# import adapter_libraryofmoriacom
-# import adapter_wraithbaitcom
-# import adapter_dramioneorg
-# import adapter_ashwindersycophanthexcom
-# import adapter_chaossycophanthexcom
-# import adapter_erosnsapphosycophanthexcom
-# import adapter_lumossycophanthexcom
-# import adapter_occlumencysycophanthexcom
-# import adapter_phoenixsongnet
-# import adapter_walkingtheplankorg
-# import adapter_dokugacom
-# import adapter_iketernalnet
-# import adapter_storiesofardacom
-# import adapter_destinysgatewaycom
-# import adapter_ncisfictioncom
-# import adapter_fanfiktionde
-# import adapter_ponyfictionarchivenet
-# import adapter_ncisficcom
-# import adapter_nationallibrarynet
-# import adapter_themasquenet
-# import adapter_pretendercentrecom
-# import adapter_darksolaceorg
-# import adapter_finestoriescom
-# import adapter_hpfanficarchivecom
-# import adapter_twilightarchivescom
-# import adapter_nhamagicalworldsus
-# import adapter_hlfictionnet
-# import adapter_dracoandginnycom
-# import adapter_scarvesandcoffeenet
-# import adapter_thepetulantpoetesscom
-# import adapter_wolverineandroguecom
-# import adapter_merlinficdtwinscouk
-# import adapter_thehookupzonenet
-# import adapter_bloodtiesfancom
-# import adapter_qafficcom
-# import adapter_efpfanficnet
-# import adapter_potterficscom
-# import adapter_efictionestelielde
-# import adapter_imagineeficcom
-# import adapter_asr3slashzoneorg
-# import adapter_potterheadsanonymouscom
-# import adapter_fictionpadcom
-# import adapter_storiesonlinenet
-# import adapter_trekiverseorg
-# import adapter_literotica
-# import adapter_voracity2eficcom
-# import adapter_spikeluvercom
-# import adapter_bloodshedversecom
-# import adapter_nocturnallightnet
-# import adapter_fanfichu
-# import adapter_fictionmaniatv
-# import adapter_tolkienfanfiction
-# import adapter_themaplebookshelf
-# import adapter_fannation
-# import adapter_sheppardweircom
-# import adapter_samandjacknet
-# import adapter_csiforensicscom
-# import adapter_lotrfanfictioncom
-# import adapter_fhsarchivecom
-# import adapter_fanfictionjunkiesde
-# import adapter_tgstorytimecom
-# import adapter_itcouldhappennet
-# import adapter_forumsspacebattlescom
-# import adapter_forumssufficientvelocitycom
-# import adapter_forumquestionablequestingcom
-# import adapter_ninelivesarchivecom
-# import adapter_masseffect2in
-# import adapter_quotevcom
-# import adapter_mcstoriescom
-# import adapter_buffygilescom
-# import adapter_andromedawebcom
-# import adapter_artemisfowlcom
-# import adapter_naiceanilmenet
-# import adapter_deepinmysoulnet
-# import adapter_kiarepositorymujajinet
-# import adapter_adultfanfictionorg
-# import adapter_fictionhuntcom
-# import adapter_royalroadl
-# import adapter_chosentwofanficcom
-# import adapter_bdsmlibrarycom
-# import adapter_asexstoriescom
-# import adapter_gluttonyfictioncom
-# import adapter_valentchambercom
-# import adapter_looselugscom
-# import adapter_wwwgiantessworldnet
-# import adapter_lotrgficcom
-# import adapter_tomparisdormcom
-# import adapter_writingwhimsicalwanderingsnet
-# import adapter_sugarquillnet
-# import adapter_wwwarea52hkhnet
-# import adapter_starslibrarynet
-# import adapter_fanficauthorsnet
-# import adapter_fireflyfansnet
-# import adapter_fireflypopulliorg
-# import adapter_sebklainenet
-# import adapter_shriftweborgbfa
-# import adapter_trekfanfictionnet
-# import adapter_wuxiaworldcom
-# import adapter_wwwlushstoriescom
-# import adapter_wwwutopiastoriescom
-# import adapter_sinfuldreamscomunicornfic
-# import adapter_sinfuldreamscomwhisperedmuse
-# import adapter_sinfuldreamscomwickedtemptation
-# import adapter_asianfanficscom
-# import adapter_webnovelcom
-# import adapter_deandamagecom
-# import adapter_imrightbehindyoucom
-# import adapter_mttjustoncenet
-# import adapter_narutoficorg
-# import adapter_starskyhutcharchivenet
-# import adapter_swordborderlineangelcom
-# import adapter_tasteofpoisoninkubationnet
-# import adapter_thebrokenworldorg
-# import adapter_thedelphicexpansecom
-# import adapter_thundercatsfansorg
-# import adapter_unknowableroomorg
-# import adapter_www13hoursorg
-# import adapter_wwwaneroticstorycom
-# import adapter_gravitytalescom
-# import adapter_lcfanficcom
-# import adapter_noveltrovecom
-# import adapter_inkbunnynet
-# import adapter_alternatehistorycom
-# import adapter_wattpadcom
-# import adapter_lightnovelgatecom
-# import adapter_wwwnovelallcom
-# import adapter_wuxiaworldco
-# import adapter_harrypotterfanfictioncom
+# from . import adapter_fanficcastletvnet
+# from . import adapter_fictionalleyorg
+# from . import adapter_fictionpresscom
+# from . import adapter_ficwadcom
+# from . import adapter_fimfictionnet
+# from . import adapter_mediaminerorg
+# from . import adapter_potionsandsnitches
+# from . import adapter_tenhawkpresentscom
+# from . import adapter_adastrafanficcom
+# from . import adapter_tthfanficorg
+# from . import adapter_twilightednet
+# from . import adapter_whoficcom
+# from . import adapter_siyecouk
+# from . import adapter_archiveofourownorg
+# from . import adapter_ficbooknet
+# from . import adapter_nfacommunitycom
+# from . import adapter_midnightwhispers
+# from . import adapter_ksarchivecom
+# from . import adapter_archiveskyehawkecom
+# from . import adapter_squidgeorgpeja
+# from . import adapter_libraryofmoriacom
+# from . import adapter_wraithbaitcom
+# from . import adapter_dramioneorg
+# from . import adapter_ashwindersycophanthexcom
+# from . import adapter_chaossycophanthexcom
+# from . import adapter_erosnsapphosycophanthexcom
+# from . import adapter_lumossycophanthexcom
+# from . import adapter_occlumencysycophanthexcom
+# from . import adapter_phoenixsongnet
+# from . import adapter_walkingtheplankorg
+# from . import adapter_dokugacom
+# from . import adapter_iketernalnet
+# from . import adapter_storiesofardacom
+# from . import adapter_destinysgatewaycom
+# from . import adapter_ncisfictioncom
+# from . import adapter_fanfiktionde
+# from . import adapter_ponyfictionarchivenet
+# from . import adapter_ncisficcom
+# from . import adapter_nationallibrarynet
+# from . import adapter_themasquenet
+# from . import adapter_pretendercentrecom
+# from . import adapter_darksolaceorg
+# from . import adapter_finestoriescom
+# from . import adapter_hpfanficarchivecom
+# from . import adapter_twilightarchivescom
+# from . import adapter_nhamagicalworldsus
+# from . import adapter_hlfictionnet
+# from . import adapter_dracoandginnycom
+# from . import adapter_scarvesandcoffeenet
+# from . import adapter_thepetulantpoetesscom
+# from . import adapter_wolverineandroguecom
+# from . import adapter_merlinficdtwinscouk
+# from . import adapter_thehookupzonenet
+# from . import adapter_bloodtiesfancom
+# from . import adapter_qafficcom
+# from . import adapter_efpfanficnet
+# from . import adapter_potterficscom
+# from . import adapter_efictionestelielde
+# from . import adapter_imagineeficcom
+# from . import adapter_asr3slashzoneorg
+# from . import adapter_potterheadsanonymouscom
+# from . import adapter_fictionpadcom
+# from . import adapter_storiesonlinenet
+# from . import adapter_trekiverseorg
+# from . import adapter_literotica
+# from . import adapter_voracity2eficcom
+# from . import adapter_spikeluvercom
+# from . import adapter_bloodshedversecom
+# from . import adapter_nocturnallightnet
+# from . import adapter_fanfichu
+# from . import adapter_fictionmaniatv
+# from . import adapter_tolkienfanfiction
+# from . import adapter_themaplebookshelf
+# from . import adapter_fannation
+# from . import adapter_sheppardweircom
+# from . import adapter_samandjacknet
+# from . import adapter_csiforensicscom
+# from . import adapter_lotrfanfictioncom
+# from . import adapter_fhsarchivecom
+# from . import adapter_fanfictionjunkiesde
+# from . import adapter_tgstorytimecom
+# from . import adapter_itcouldhappennet
+from . import adapter_forumsspacebattlescom
+# from . import adapter_forumssufficientvelocitycom
+# from . import adapter_forumquestionablequestingcom
+# from . import adapter_ninelivesarchivecom
+# from . import adapter_masseffect2in
+# from . import adapter_quotevcom
+# from . import adapter_mcstoriescom
+# from . import adapter_buffygilescom
+# from . import adapter_andromedawebcom
+# from . import adapter_artemisfowlcom
+# from . import adapter_naiceanilmenet
+# from . import adapter_deepinmysoulnet
+# from . import adapter_kiarepositorymujajinet
+# from . import adapter_adultfanfictionorg
+# from . import adapter_fictionhuntcom
+# from . import adapter_royalroadl
+# from . import adapter_chosentwofanficcom
+# from . import adapter_bdsmlibrarycom
+# from . import adapter_asexstoriescom
+# from . import adapter_gluttonyfictioncom
+# from . import adapter_valentchambercom
+# from . import adapter_looselugscom
+# from . import adapter_wwwgiantessworldnet
+# from . import adapter_lotrgficcom
+# from . import adapter_tomparisdormcom
+# from . import adapter_writingwhimsicalwanderingsnet
+# from . import adapter_sugarquillnet
+# from . import adapter_wwwarea52hkhnet
+# from . import adapter_starslibrarynet
+# from . import adapter_fanficauthorsnet
+# from . import adapter_fireflyfansnet
+# from . import adapter_fireflypopulliorg
+# from . import adapter_sebklainenet
+# from . import adapter_shriftweborgbfa
+# from . import adapter_trekfanfictionnet
+# from . import adapter_wuxiaworldcom
+# from . import adapter_wwwlushstoriescom
+# from . import adapter_wwwutopiastoriescom
+# from . import adapter_sinfuldreamscomunicornfic
+# from . import adapter_sinfuldreamscomwhisperedmuse
+# from . import adapter_sinfuldreamscomwickedtemptation
+# from . import adapter_asianfanficscom
+# from . import adapter_webnovelcom
+# from . import adapter_deandamagecom
+# from . import adapter_imrightbehindyoucom
+# from . import adapter_mttjustoncenet
+# from . import adapter_narutoficorg
+# from . import adapter_starskyhutcharchivenet
+# from . import adapter_swordborderlineangelcom
+# from . import adapter_tasteofpoisoninkubationnet
+# from . import adapter_thebrokenworldorg
+# from . import adapter_thedelphicexpansecom
+# from . import adapter_thundercatsfansorg
+# from . import adapter_unknowableroomorg
+# from . import adapter_www13hoursorg
+# from . import adapter_wwwaneroticstorycom
+# from . import adapter_gravitytalescom
+# from . import adapter_lcfanficcom
+# from . import adapter_noveltrovecom
+# from . import adapter_inkbunnynet
+# from . import adapter_alternatehistorycom
+# from . import adapter_wattpadcom
+# from . import adapter_lightnovelgatecom
+# from . import adapter_wwwnovelallcom
+# from . import adapter_wuxiaworldco
+# from . import adapter_harrypotterfanfictioncom
## This bit of complexity allows adapters to be added by just adding
## importing. It eliminates the long if/else clauses we used to need
diff --git a/fanficfare/adapters/adapter_forumsspacebattlescom.py b/fanficfare/adapters/adapter_forumsspacebattlescom.py
index eaa90bcf..e9e2bc2b 100644
--- a/fanficfare/adapters/adapter_forumsspacebattlescom.py
+++ b/fanficfare/adapters/adapter_forumsspacebattlescom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2015 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,9 +15,10 @@
# limitations under the License.
#
+from __future__ import absolute_import
import re
-from base_xenforoforum_adapter import BaseXenForoForumAdapter
+from .base_xenforoforum_adapter import BaseXenForoForumAdapter
def getClass():
return ForumsSpacebattlesComAdapter
diff --git a/fanficfare/adapters/base_xenforoforum_adapter.py b/fanficfare/adapters/base_xenforoforum_adapter.py
index 7e7b05b8..dca8d37f 100644
--- a/fanficfare/adapters/base_xenforoforum_adapter.py
+++ b/fanficfare/adapters/base_xenforoforum_adapter.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2017 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,17 +15,21 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
from xml.dom.minidom import parseString
+# py2 vs py3 transition
+from six import text_type as unicode
+from six.moves.urllib.error import HTTPError
+
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
logger = logging.getLogger(__name__)
@@ -293,7 +297,7 @@ class BaseXenForoForumAdapter(BaseSiteAdapter):
(data,opened) = self._fetchUrlOpened(useurl)
useurl = opened.geturl()
logger.info("use useurl: "+useurl)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
elif e.code == 403:
@@ -504,7 +508,7 @@ class BaseXenForoForumAdapter(BaseSiteAdapter):
# assumed normalized to /posts/1234/
anchorid = "post-"+url.split('/')[-2]
- logger.debug("anchorid: %s"%anchorid)
+ # logger.debug("anchorid: %s"%anchorid)
souptag = topsoup.find('li',id=anchorid)
else:
logger.debug("post found in cache")
@@ -524,7 +528,7 @@ class BaseXenForoForumAdapter(BaseSiteAdapter):
topsoup = souptag = self.make_soup(data)
- if '#' in url:
+ if '#' in unicode(url):
anchorid = url.split('#')[1]
souptag = topsoup.find('li',id=anchorid)
From 308f9ffe6b02476ca470835a03999cfcb0882aa4 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Mon, 30 Jul 2018 14:55:02 -0500
Subject: [PATCH 020/120] Fixes for epub update.
---
fanficfare/cli.py | 4 ++--
fanficfare/epubutils.py | 15 ++++++++++-----
fanficfare/writers/writer_epub.py | 5 +----
3 files changed, 13 insertions(+), 11 deletions(-)
diff --git a/fanficfare/cli.py b/fanficfare/cli.py
index 0cc97d33..8616470f 100644
--- a/fanficfare/cli.py
+++ b/fanficfare/cli.py
@@ -355,9 +355,9 @@ def do_download(arg,
output_filename = writer.getOutputFileName()
noturl, chaptercount = get_dcsource_chaptercount(output_filename)
print('Updating %s, URL: %s' % (output_filename, url))
- except Exception:
+ except Exception as e:
+ print("Failed to read epub for update: (%s) Continuing with update=false"%e)
options.update = False
- pass
# Check for include_images without no_image_processing. In absence of PIL, give warning.
if adapter.getConfig('include_images') and not adapter.getConfig('no_image_processing'):
diff --git a/fanficfare/epubutils.py b/fanficfare/epubutils.py
index 47eeb983..c4d54c43 100644
--- a/fanficfare/epubutils.py
+++ b/fanficfare/epubutils.py
@@ -1,7 +1,8 @@
# -*- coding: utf-8 -*-
+from __future__ import absolute_import
__license__ = 'GPL v3'
-__copyright__ = '2017, Jim Miller'
+__copyright__ = '2018, Jim Miller'
__docformat__ = 'restructuredtext en'
import logging
@@ -11,7 +12,11 @@ import re, os, traceback
from collections import defaultdict
from zipfile import ZipFile, ZIP_STORED, ZIP_DEFLATED
from xml.dom.minidom import parseString
-from six import StringIO
+
+# py2 vs py3 transition
+from six import text_type as unicode
+from six import string_types as basestring
+from six import BytesIO # StringIO under py2
import bs4
@@ -158,7 +163,7 @@ def get_update_data(inputio,
chapurl = soup.find('meta',{'name':'chapterurl'})
if chapurl:
if chapurl['content'] not in urlsoups: # keep first found if more than one.
- #print("Found chapurl['content']:%s"%chapurl['content'])
+ # print("Found chapurl['content']:%s"%chapurl['content'])
currenturl = chapurl['content']
urlsoups[chapurl['content']] = bodysoup
else:
@@ -188,7 +193,7 @@ def get_update_data(inputio,
#for k in images.keys():
#print("\tlongdesc:%s\n\tData len:%s\n"%(k,len(images[k])))
- # print("datamaps:%s"%datamaps)
+ print("datamaps:%s"%datamaps)
return (source,filecount,soups,images,oldcover,calibrebookmark,logfile,urlsoups,datamaps)
def get_path_part(n):
@@ -274,7 +279,7 @@ def reset_orig_chapters_epub(inputio,outfile):
inputepub = ZipFile(inputio, 'r') # works equally well with a path or a blob
## build zip in memory in case updating in place(CLI).
- zipio = StringIO()
+ zipio = BytesIO()
## Write mimetype file, must be first and uncompressed.
## Older versions of python(2.4/5) don't allow you to specify
diff --git a/fanficfare/writers/writer_epub.py b/fanficfare/writers/writer_epub.py
index e94f27eb..b52bc976 100644
--- a/fanficfare/writers/writer_epub.py
+++ b/fanficfare/writers/writer_epub.py
@@ -579,14 +579,11 @@ div { margin: 0pt; padding: 0pt; }
package.appendChild(guide)
# write content.opf to zip.
- contentxml = contentdom.toxml() # encoding='utf-8'
-
+ contentxml = contentdom.toxml(encoding='utf-8')
# Causes py2 vs py3 issues with encoding nonsense. Skip for now.
# tweak for brain damaged Nook STR. Nook insists on name before content.
# contentxml = contentxml.replace(''%coverimgid,
# ''%coverimgid)
-
-
outputepub.writestr("content.opf",contentxml)
contentdom.unlink()
From 0870e2056f6492962add994f076f4239701c5ab0 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Tue, 31 Jul 2018 13:01:35 -0500
Subject: [PATCH 021/120] Tweaks to imports for calibre plugin.
---
fanficfare/adapters/__init__.py | 1 +
fanficfare/configurable.py | 6 +++++-
2 files changed, 6 insertions(+), 1 deletion(-)
diff --git a/fanficfare/adapters/__init__.py b/fanficfare/adapters/__init__.py
index ceacd2e0..8d90d247 100644
--- a/fanficfare/adapters/__init__.py
+++ b/fanficfare/adapters/__init__.py
@@ -24,6 +24,7 @@ from six.moves.urllib.parse import urlparse
logger = logging.getLogger(__name__)
from .. import exceptions as exceptions
+from .. import configurable as configurable
## must import each adapter here.
diff --git a/fanficfare/configurable.py b/fanficfare/configurable.py
index 0770617a..2c24b89f 100644
--- a/fanficfare/configurable.py
+++ b/fanficfare/configurable.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import re
import exceptions
import codecs
@@ -81,7 +82,10 @@ try:
from . import adapters
except ImportError:
import sys
- adapters = sys.modules["fanficfare.adapters"]
+ if "fanficfare.adapters" in sys.modules:
+ adapters = sys.modules["fanficfare.adapters"]
+ elif "calibre_plugins.fanficfare_plugin.fanficfare.adapters" in sys.modules:
+ adapters = sys.modules["calibre_plugins.fanficfare_plugin.fanficfare.adapters"]
def re_compile(regex,line):
try:
From 8f4cdfe24ac7eb5a5e3c183d78cdfeee91c9b1f3 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Tue, 31 Jul 2018 13:44:03 -0500
Subject: [PATCH 022/120] Added own copy of six.py as fanficfare.six for
ensure_str etc.
---
fanficfare/__init__.py | 3 +-
fanficfare/adapters/__init__.py | 3 +-
fanficfare/adapters/adapter_fanfictionnet.py | 4 +-
fanficfare/adapters/base_adapter.py | 6 +-
fanficfare/adapters/base_efiction_adapter.py | 15 +-
.../adapters/base_xenforoforum_adapter.py | 4 +-
fanficfare/cli.py | 8 +-
fanficfare/configurable.py | 20 +-
fanficfare/epubutils.py | 6 +-
fanficfare/geturls.py | 43 +-
fanficfare/gziphttp.py | 11 +-
fanficfare/htmlcleanup.py | 6 +-
fanficfare/htmlheuristics.py | 4 +-
fanficfare/mobi.py | 10 +-
fanficfare/mobihtml.py | 6 +-
fanficfare/six.py | 950 ++++++++++++++++++
fanficfare/story.py | 20 +-
fanficfare/writers/base_writer.py | 6 +-
fanficfare/writers/writer_epub.py | 6 +-
fanficfare/writers/writer_html.py | 2 +-
fanficfare/writers/writer_mobi.py | 2 +-
included_dependencies/six.py | 116 ++-
22 files changed, 1148 insertions(+), 103 deletions(-)
create mode 100644 fanficfare/six.py
diff --git a/fanficfare/__init__.py b/fanficfare/__init__.py
index 9784e911..c6d2afca 100644
--- a/fanficfare/__init__.py
+++ b/fanficfare/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2015 Fanficdownloader team, 2016 FanFicFare team
+# Copyright 2015 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
@@ -14,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+from __future__ import absolute_import
try:
# just a way to switch between web service and CLI/PI
diff --git a/fanficfare/adapters/__init__.py b/fanficfare/adapters/__init__.py
index 8d90d247..2ef031d7 100644
--- a/fanficfare/adapters/__init__.py
+++ b/fanficfare/adapters/__init__.py
@@ -19,7 +19,7 @@ from __future__ import absolute_import
import os, re, sys, glob, types
from os.path import dirname, basename, normpath
import logging
-from six.moves.urllib.parse import urlparse
+from ..six.moves.urllib.parse import urlparse
logger = logging.getLogger(__name__)
@@ -28,6 +28,7 @@ from .. import configurable as configurable
## must import each adapter here.
+from . import base_efiction_adapter
from . import adapter_test1
from . import adapter_fanfictionnet
# from . import adapter_fanficcastletvnet
diff --git a/fanficfare/adapters/adapter_fanfictionnet.py b/fanficfare/adapters/adapter_fanfictionnet.py
index 79633699..8fa6dc77 100644
--- a/fanficfare/adapters/adapter_fanfictionnet.py
+++ b/fanficfare/adapters/adapter_fanfictionnet.py
@@ -22,8 +22,8 @@ logger = logging.getLogger(__name__)
import re
# py2 vs py3 transition
-from six import text_type as unicode
-from six.moves.urllib.error import HTTPError
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
from .. import exceptions as exceptions
diff --git a/fanficfare/adapters/base_adapter.py b/fanficfare/adapters/base_adapter.py
index 947ead1e..6c90520a 100644
--- a/fanficfare/adapters/base_adapter.py
+++ b/fanficfare/adapters/base_adapter.py
@@ -21,11 +21,11 @@ from datetime import datetime, timedelta
from collections import defaultdict
# py2 vs py3 transition
-from six import text_type as unicode
-from six import string_types as basestring
+from ..six import text_type as unicode
+from ..six import string_types as basestring
+from ..six.moves.urllib.parse import urlparse
import logging
-from six.moves.urllib.parse import urlparse
from functools import partial
import traceback
import copy
diff --git a/fanficfare/adapters/base_efiction_adapter.py b/fanficfare/adapters/base_efiction_adapter.py
index 69df5ce8..fd128617 100644
--- a/fanficfare/adapters/base_efiction_adapter.py
+++ b/fanficfare/adapters/base_efiction_adapter.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2014 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2014 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,18 +16,21 @@
#
# Software: eFiction
-# import time
-# import urllib
+from __future__ import absolute_import
+
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
import bs4 as bs
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
+from .base_adapter import BaseSiteAdapter, makeDate
"""
This is a generic adapter for eFiction based archives (see
@@ -216,7 +219,7 @@ class BaseEfictionAdapter(BaseSiteAdapter):
"""
try:
html = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/base_xenforoforum_adapter.py b/fanficfare/adapters/base_xenforoforum_adapter.py
index dca8d37f..d5ea1fec 100644
--- a/fanficfare/adapters/base_xenforoforum_adapter.py
+++ b/fanficfare/adapters/base_xenforoforum_adapter.py
@@ -23,8 +23,8 @@ import re
from xml.dom.minidom import parseString
# py2 vs py3 transition
-from six import text_type as unicode
-from six.moves.urllib.error import HTTPError
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
diff --git a/fanficfare/cli.py b/fanficfare/cli.py
index 8616470f..00b23f54 100644
--- a/fanficfare/cli.py
+++ b/fanficfare/cli.py
@@ -21,8 +21,6 @@ from optparse import OptionParser, SUPPRESS_HELP
from os.path import expanduser, join, dirname
from os import access, R_OK
from subprocess import call
-from six import StringIO
-from six.moves import configparser
import getpass
import logging
import pprint
@@ -64,12 +62,18 @@ try:
from calibre_plugins.fanficfare_plugin.fanficfare.epubutils import (
get_dcsource_chaptercount, get_update_data, reset_orig_chapters_epub)
from calibre_plugins.fanficfare_plugin.fanficfare.geturls import get_urls_from_page, get_urls_from_imap
+ from calibre_plugins.fanficfare_plugin.fanficfare.six import StringIO
+ from calibre_plugins.fanficfare_plugin.fanficfare.six.moves import configparser
+ from calibre_plugins.fanficfare_plugin.fanficfare.six.moves import http_cookiejar as cl
except ImportError:
from fanficfare import adapters, writers, exceptions
from fanficfare.configurable import Configuration
from fanficfare.epubutils import (
get_dcsource_chaptercount, get_update_data, reset_orig_chapters_epub)
from fanficfare.geturls import get_urls_from_page, get_urls_from_imap
+ from fanficfare.six import StringIO
+ from fanficfare.six.moves import configparser
+ from fanficfare.six.moves import http_cookiejar as cl
def write_story(config, adapter, writeformat, metaonly=False, outstream=None):
diff --git a/fanficfare/configurable.py b/fanficfare/configurable.py
index 2c24b89f..dd572dca 100644
--- a/fanficfare/configurable.py
+++ b/fanficfare/configurable.py
@@ -21,15 +21,15 @@ import exceptions
import codecs
# py2 vs py3 transition
-import six
-from six.moves import configparser
-from six.moves.configparser import DEFAULTSECT, MissingSectionHeaderError, ParsingError
-from six.moves import urllib
-from six.moves.urllib.request import (build_opener, HTTPCookieProcessor)
-from six.moves.urllib.error import HTTPError
-from six.moves import http_cookiejar as cl
-from six import text_type as unicode
-from six import string_types as basestring
+from . import six
+from .six.moves import configparser
+from .six.moves.configparser import DEFAULTSECT, MissingSectionHeaderError, ParsingError
+from .six.moves import urllib
+from .six.moves.urllib.request import (build_opener, HTTPCookieProcessor)
+from .six.moves.urllib.error import HTTPError
+from .six.moves import http_cookiejar as cl
+from .six import text_type as unicode
+from .six import string_types as basestring
import time
import logging
@@ -57,7 +57,7 @@ try:
except ImportError:
chardet = None
-from gziphttp import GZipProcessor
+from .gziphttp import GZipProcessor
# All of the writers(epub,html,txt) and adapters(ffnet,twlt,etc)
# inherit from Configurable. The config file(s) uses ini format:
diff --git a/fanficfare/epubutils.py b/fanficfare/epubutils.py
index c4d54c43..5cb6776f 100644
--- a/fanficfare/epubutils.py
+++ b/fanficfare/epubutils.py
@@ -14,9 +14,9 @@ from zipfile import ZipFile, ZIP_STORED, ZIP_DEFLATED
from xml.dom.minidom import parseString
# py2 vs py3 transition
-from six import text_type as unicode
-from six import string_types as basestring
-from six import BytesIO # StringIO under py2
+from .six import text_type as unicode
+from .six import string_types as basestring
+from .six import BytesIO # StringIO under py2
import bs4
diff --git a/fanficfare/geturls.py b/fanficfare/geturls.py
index f55f21dd..d0175830 100644
--- a/fanficfare/geturls.py
+++ b/fanficfare/geturls.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2015 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2015 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,24 +15,27 @@
# limitations under the License.
#
+from __future__ import absolute_import
import collections
import email
import imaplib
import re
-from six.moves.urllib.request import (build_opener, HTTPCookieProcessor)
-from six.moves.urllib.parse import (urlparse, urlunparse)
+from .six.moves.urllib.request import (build_opener, HTTPCookieProcessor)
+from .six.moves.urllib.parse import (urlparse, urlunparse)
# unicode in py2, str in py3
-from six import text_type as unicode
+from .six import text_type as unicode
+
+from .six import ensure_str
import logging
logger = logging.getLogger(__name__)
from bs4 import BeautifulSoup
-from gziphttp import GZipProcessor
+from .gziphttp import GZipProcessor
from . import adapters
-from configurable import Configuration
-from exceptions import UnknownSite
+from .configurable import Configuration
+from .exceptions import UnknownSite
def get_urls_from_page(url,configuration=None,normalize=False):
@@ -120,12 +123,12 @@ def get_urls_from_html(data,url=None,configuration=None,normalize=False,restrict
# Simply return the longest URL with the assumption that it contains the
# most user readable metadata, if not normalized
- return urls.keys() if normalize else [max(value, key=len) for key, value in urls.items()]
+ return list(urls.keys()) if normalize else [max(value, key=len) for key, value in urls.items()]
def get_urls_from_text(data,configuration=None,normalize=False,email=False):
urls = collections.OrderedDict()
try:
- data = unicode(data)
+ data = ensure_str(data)
except UnicodeDecodeError:
data=data.decode('utf8') ## for when called outside calibre.
@@ -145,7 +148,7 @@ def get_urls_from_text(data,configuration=None,normalize=False,email=False):
# Simply return the longest URL with the assumption that it contains the
# most user readable metadata, if not normalized
- return urls.keys() if normalize else [max(value, key=len) for key, value in urls.items()]
+ return list(urls.keys()) if normalize else [max(value, key=len) for key, value in urls.items()]
def form_url(parenturl,url):
@@ -194,7 +197,7 @@ def cleanup_url(href,email=False):
def get_urls_from_imap(srv,user,passwd,folder,markread=True):
- logger.debug("get_urls_from_imap srv:(%s)"%srv)
+ # logger.debug("get_urls_from_imap srv:(%s)"%srv)
mail = imaplib.IMAP4_SSL(srv)
mail.login(user, passwd)
mail.list()
@@ -217,33 +220,31 @@ def get_urls_from_imap(srv,user,passwd,folder,markread=True):
result, data = mail.uid('fetch', email_uid, '(BODY.PEEK[])') #RFC822
- #logger.debug("result:%s"%result)
- #logger.debug("data:%s"%data)
+ # logger.debug("result:%s"%result)
+ # logger.debug("data:%s"%data)
raw_email = data[0][1]
#raw_email = data[0][1] # here's the body, which is raw text of the whole email
# including headers and alternate payloads
- email_message = email.message_from_string(raw_email)
+ email_message = email.message_from_string(ensure_str(raw_email))
- #logger.debug "To:%s"%email_message['To']
- #logger.debug "From:%s"%email_message['From']
- #logger.debug "Subject:%s"%email_message['Subject']
-
- # logger.debug("payload:%s"%email_message.get_payload())
+ # logger.debug("To:%s"%email_message['To'])
+ # logger.debug("From:%s"%email_message['From'])
+ # logger.debug("Subject:%s"%email_message['Subject'])
+ # logger.debug("payload:%r"%email_message.get_payload(decode=True))
urllist=[]
for part in email_message.walk():
try:
- #logger.debug("part mime:%s"%part.get_content_type())
+ # logger.debug("part mime:%s"%part.get_content_type())
if part.get_content_type() == 'text/plain':
urllist.extend(get_urls_from_text(part.get_payload(decode=True),email=True))
if part.get_content_type() == 'text/html':
urllist.extend(get_urls_from_html(part.get_payload(decode=True),email=True))
except Exception as e:
logger.error("Failed to read email content: %s"%e,exc_info=True)
- #logger.debug "urls:%s"%get_urls_from_text(get_first_text_block(email_message))
if urllist and markread:
#obj.store(data[0].replace(' ',','),'+FLAGS','\Seen')
diff --git a/fanficfare/gziphttp.py b/fanficfare/gziphttp.py
index ddc2ef44..07aeb471 100644
--- a/fanficfare/gziphttp.py
+++ b/fanficfare/gziphttp.py
@@ -1,9 +1,12 @@
-## Borrowed from http://techknack.net/python-urllib2-handlers/
+# -*- coding: utf-8 -*-
+
+## Borrowed from http://techknack.net/python-urllib2-handlers/
+from __future__ import absolute_import
-from six.moves.urllib.request import BaseHandler
-from six.moves.urllib.response import addinfourl
from gzip import GzipFile
-from six import BytesIO
+from .six.moves.urllib.request import BaseHandler
+from .six.moves.urllib.response import addinfourl
+from .six import BytesIO
class GZipProcessor(BaseHandler):
"""A handler to add gzip capabilities to urllib2 requests
diff --git a/fanficfare/htmlcleanup.py b/fanficfare/htmlcleanup.py
index 3d1bc41b..44ee5983 100644
--- a/fanficfare/htmlcleanup.py
+++ b/fanficfare/htmlcleanup.py
@@ -22,9 +22,9 @@ logger = logging.getLogger(__name__)
import re
# py2 vs py3 transition
-from six import text_type as unicode
-from six import string_types as basestring
-from six import unichr
+from .six import text_type as unicode
+from .six import string_types as basestring
+from .six import unichr
def _unirepl(match):
"Return the unicode string for a decimal number"
diff --git a/fanficfare/htmlheuristics.py b/fanficfare/htmlheuristics.py
index a14e918a..179790fb 100644
--- a/fanficfare/htmlheuristics.py
+++ b/fanficfare/htmlheuristics.py
@@ -23,8 +23,8 @@ import codecs
import bs4 as bs
# py2 vs py3 transition
-from six import text_type as unicode
-from six.moves import range
+from .six import text_type as unicode
+from .six.moves import range
from . import HtmlTagStack as stack
from . import exceptions as exceptions
diff --git a/fanficfare/mobi.py b/fanficfare/mobi.py
index adc95096..24a6c41b 100644
--- a/fanficfare/mobi.py
+++ b/fanficfare/mobi.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# Copyright(c) 2009 Andrew Chatham and Vijay Pandurangan
# Changes Copyright 2018 FanFicFare team
-
+from __future__ import absolute_import
import struct
import time
@@ -9,13 +9,13 @@ import random
import logging
# py2 vs py3 transition
-from six import text_type as unicode
-from six import string_types as basestring
-from six import BytesIO # StringIO under py2
+from .six import text_type as unicode
+from .six import string_types as basestring
+from .six import BytesIO # StringIO under py2
logger = logging.getLogger(__name__)
-from mobihtml import HtmlProcessor
+from .mobihtml import HtmlProcessor
# http://wiki.mobileread.com/wiki/MOBI
# http://membres.lycos.fr/microfirst/palm/pdb.html
diff --git a/fanficfare/mobihtml.py b/fanficfare/mobihtml.py
index c0dcf029..7f6d4a65 100644
--- a/fanficfare/mobihtml.py
+++ b/fanficfare/mobihtml.py
@@ -5,11 +5,11 @@
import re
import sys
-from six.moves.urllib.parse import unquote
+from .six.moves.urllib.parse import unquote
# py2 vs py3 transition
-from six import text_type as unicode
-from six import binary_type as bytes
+from .six import text_type as unicode
+from .six import binary_type as bytes
# import bs4
# BeautifulSoup = bs4.BeautifulSoup
diff --git a/fanficfare/six.py b/fanficfare/six.py
new file mode 100644
index 00000000..0691cea7
--- /dev/null
+++ b/fanficfare/six.py
@@ -0,0 +1,950 @@
+# Copyright (c) 2010-2018 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.11.0fff" # for version included in fanficfare
+print("fff six")
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+ This class implements a PEP302 finder and loader. It should be compatible
+ with Python 2.5 and all existing versions of Python3
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+ Return true, if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP451)
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required, if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+ get_source = get_code # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+ """Lazy loading of moved objects"""
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("getoutput", "commands", "subprocess"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+ MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+ MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+ "moves.urllib")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ def create_unbound_method(func, cls):
+ return func
+
+ Iterator = object
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
+else:
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
+
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+ "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+ "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+
+ def u(s):
+ return s
+ unichr = chr
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
+else:
+ def b(s):
+ return s
+ # Workaround for standalone backslash
+
+ def u(s):
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+ unichr = unichr
+ int2byte = chr
+
+ def byte2int(bs):
+ return ord(bs[0])
+
+ def indexbytes(buf, i):
+ return ord(buf[i])
+ iterbytes = functools.partial(itertools.imap, ord)
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+ def reraise(tp, value, tb=None):
+ try:
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+ finally:
+ value = None
+ tb = None
+
+else:
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec("""exec _code_ in _globs_, _locs_""")
+
+ exec_("""def reraise(tp, value, tb=None):
+ try:
+ raise tp, value, tb
+ finally:
+ tb = None
+""")
+
+
+if sys.version_info[:2] == (3, 2):
+ exec_("""def raise_from(value, from_value):
+ try:
+ if from_value is None:
+ raise value
+ raise value from from_value
+ finally:
+ value = None
+""")
+elif sys.version_info[:2] > (3, 2):
+ exec_("""def raise_from(value, from_value):
+ try:
+ raise value from from_value
+ finally:
+ value = None
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+ def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ def wrapper(f):
+ f = functools.wraps(wrapped, assigned, updated)(f)
+ f.__wrapped__ = wrapped
+ return f
+ return wrapper
+else:
+ wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(type):
+
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+
+ @classmethod
+ def __prepare__(cls, name, this_bases):
+ return meta.__prepare__(name, bases)
+ return type.__new__(metaclass, 'temporary_class', (), {})
+
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
+
+
+def ensure_binary(s, encoding='utf-8', errors='strict'):
+ """Coerce **s** to six.binary_type.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> encoded to `bytes`
+ - `bytes` -> `bytes`
+ """
+ if isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ elif isinstance(s, binary_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to `str`.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
+ if PY2 and isinstance(s, text_type):
+ s = s.encode(encoding, errors)
+ elif PY3 and isinstance(s, binary_type):
+ s = s.decode(encoding, errors)
+ return s
+
+
+def ensure_text(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to six.text_type.
+
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+
+def python_2_unicode_compatible(klass):
+ """
+ A decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+ # inserted an importer with different class.
+ if (type(importer).__name__ == "_SixMetaPathImporter" and
+ importer.name == __name__):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/fanficfare/story.py b/fanficfare/story.py
index 6fa49fb9..962762bc 100644
--- a/fanficfare/story.py
+++ b/fanficfare/story.py
@@ -28,11 +28,11 @@ import logging
logger = logging.getLogger(__name__)
# py2 vs py3 transition
-import six
-from six.moves.urllib.parse import (urlparse, urlunparse)
-from six import text_type as unicode
-from six import string_types as basestring
-from six.moves import map
+from . import six
+from .six.moves.urllib.parse import (urlparse, urlunparse)
+from .six import text_type as unicode
+from .six import string_types as basestring
+from .six.moves import map
import bs4
@@ -57,13 +57,13 @@ imagetypes = {
try:
from calibre.utils.magick import Image
- from six import BytesIO
+ from .six import BytesIO
from gif import GifInfo, CHECK_IS_ANIMATED
convtype = {'jpg':'JPG', 'png':'PNG'}
def convert_image(url,data,sizes,grayscale,
removetrans,imgtype="jpg",background='#ffffff'):
- logger.debug("calibre convert_image called")
+ # logger.debug("calibre convert_image called")
if url.lower().endswith('.svg'):
raise exceptions.RejectImage("Calibre image processing chokes on SVG images.")
@@ -107,11 +107,11 @@ except:
# No calibre routines, try for Pillow for CLI.
try:
from PIL import Image
- from six import BytesIO
+ from .six import BytesIO
convtype = {'jpg':'JPEG', 'png':'PNG'}
def convert_image(url,data,sizes,grayscale,
removetrans,imgtype="jpg",background='#ffffff'):
- logger.debug("Pillow convert_image called")
+ # logger.debug("Pillow convert_image called")
export = False
img = Image.open(BytesIO(data))
@@ -151,7 +151,7 @@ except:
# No calibre or PIL, simple pass through with mimetype.
def convert_image(url,data,sizes,grayscale,
removetrans,imgtype="jpg",background='#ffffff'):
- logger.debug("NO convert_image called")
+ # logger.debug("NO convert_image called")
return no_convert_image(url,data)
## also used for explicit no image processing.
diff --git a/fanficfare/writers/base_writer.py b/fanficfare/writers/base_writer.py
index 12eada78..d0b67d4c 100644
--- a/fanficfare/writers/base_writer.py
+++ b/fanficfare/writers/base_writer.py
@@ -20,14 +20,14 @@ import re
import os.path
import datetime
import string
-import six
import zipfile
from zipfile import ZipFile, ZIP_DEFLATED
import logging
# py2 vs py3 transition
-from six import text_type as unicode
-from six import BytesIO # StringIO under py2
+from .. import six
+from ..six import text_type as unicode
+from ..six import BytesIO # StringIO under py2
from ..configurable import Configurable
from ..htmlcleanup import removeEntities, removeAllEntities, stripHTML
diff --git a/fanficfare/writers/writer_epub.py b/fanficfare/writers/writer_epub.py
index b52bc976..e76e4dc8 100644
--- a/fanficfare/writers/writer_epub.py
+++ b/fanficfare/writers/writer_epub.py
@@ -24,9 +24,9 @@ import urllib
import re
# py2 vs py3 transition
-from six import text_type as unicode
-from six import string_types as basestring
-from six import BytesIO # StringIO under py2
+from ..six import text_type as unicode
+from ..six import string_types as basestring
+from ..six import BytesIO # StringIO under py2
## XML isn't as forgiving as HTML, so rather than generate as strings,
## use DOM to generate the XML files.
diff --git a/fanficfare/writers/writer_html.py b/fanficfare/writers/writer_html.py
index 39c2d78d..04615394 100644
--- a/fanficfare/writers/writer_html.py
+++ b/fanficfare/writers/writer_html.py
@@ -20,7 +20,7 @@ import logging
import string
# py2 vs py3 transition
-from six import text_type as unicode
+from ..six import text_type as unicode
import bs4
diff --git a/fanficfare/writers/writer_mobi.py b/fanficfare/writers/writer_mobi.py
index 3806f077..bc86bb5d 100644
--- a/fanficfare/writers/writer_mobi.py
+++ b/fanficfare/writers/writer_mobi.py
@@ -25,7 +25,7 @@ from ..mobi import Converter
from ..exceptions import FailedToWriteOutput
# py2 vs py3 transition
-from six import BytesIO # StringIO under py2
+from ..six import BytesIO # StringIO under py2
logger = logging.getLogger(__name__)
diff --git a/included_dependencies/six.py b/included_dependencies/six.py
index 190c0239..7681506e 100644
--- a/included_dependencies/six.py
+++ b/included_dependencies/six.py
@@ -1,6 +1,4 @@
-"""Utilities for writing code that runs on Python 2 and 3"""
-
-# Copyright (c) 2010-2015 Benjamin Peterson
+# Copyright (c) 2010-2018 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@@ -20,6 +18,8 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
+"""Utilities for writing code that runs on Python 2 and 3"""
+
from __future__ import absolute_import
import functools
@@ -29,8 +29,8 @@ import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.10.0"
-
+__version__ = "1.11.0fffinclib" # for version included in fanficfare
+print("included_dependencies six")
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
@@ -241,6 +241,7 @@ _moved_attributes = [
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("getoutput", "commands", "subprocess"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
@@ -262,10 +263,11 @@ _moved_attributes = [
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
- MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
@@ -337,10 +339,12 @@ _urllib_parse_moved_attributes = [
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("splitvalue", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
@@ -416,6 +420,8 @@ _urllib_request_moved_attributes = [
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+ MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+ MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
@@ -679,11 +685,15 @@ if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
- if value is None:
- value = tp()
- if value.__traceback__ is not tb:
- raise value.with_traceback(tb)
- raise value
+ try:
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+ finally:
+ value = None
+ tb = None
else:
def exec_(_code_, _globs_=None, _locs_=None):
@@ -699,19 +709,28 @@ else:
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
- raise tp, value, tb
+ try:
+ raise tp, value, tb
+ finally:
+ tb = None
""")
if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
- if from_value is None:
- raise value
- raise value from from_value
+ try:
+ if from_value is None:
+ raise value
+ raise value from from_value
+ finally:
+ value = None
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
- raise value from from_value
+ try:
+ raise value from from_value
+ finally:
+ value = None
""")
else:
def raise_from(value, from_value):
@@ -802,10 +821,14 @@ def with_metaclass(meta, *bases):
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
- class metaclass(meta):
+ class metaclass(type):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
+
+ @classmethod
+ def __prepare__(cls, name, this_bases):
+ return meta.__prepare__(name, bases)
return type.__new__(metaclass, 'temporary_class', (), {})
@@ -825,6 +848,65 @@ def add_metaclass(metaclass):
return wrapper
+def ensure_binary(s, encoding='utf-8', errors='strict'):
+ """Coerce **s** to six.binary_type.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> encoded to `bytes`
+ - `bytes` -> `bytes`
+ """
+ if isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ elif isinstance(s, binary_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to `str`.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
+ if PY2 and isinstance(s, text_type):
+ s = s.encode(encoding, errors)
+ elif PY3 and isinstance(s, binary_type):
+ s = s.decode(encoding, errors)
+ return s
+
+
+def ensure_text(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to six.text_type.
+
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
From d283290cbfb900d1903faf43bf4f5dd431b6c94f Mon Sep 17 00:00:00 2001
From: Jim Miller <retiefjimm@gmail.com>
Date: Tue, 31 Jul 2018 13:45:03 -0500
Subject: [PATCH 023/120] Bump ALPHA Version 2.37.0--Python 2/3 dual version.
---
calibre-plugin/__init__.py | 2 +-
fanficfare/cli.py | 5 +----
setup.py | 2 +-
webservice/app.yaml | 2 +-
4 files changed, 4 insertions(+), 7 deletions(-)
diff --git a/calibre-plugin/__init__.py b/calibre-plugin/__init__.py
index c66f1b9d..66da2095 100644
--- a/calibre-plugin/__init__.py
+++ b/calibre-plugin/__init__.py
@@ -33,7 +33,7 @@ except NameError:
from calibre.customize import InterfaceActionBase
# pulled out from FanFicFareBase for saving in prefs.py
-__version__ = (2, 27, 12)
+__version__ = (2, 37, 0)
## Apparently the name for this class doesn't matter--it was still
## 'demo' for the first few versions.
diff --git a/fanficfare/cli.py b/fanficfare/cli.py
index 00b23f54..5276d7c1 100644
--- a/fanficfare/cli.py
+++ b/fanficfare/cli.py
@@ -40,12 +40,9 @@ else: # > 3.0
def pickle_load(f):
return pickle.load(f,encoding="bytes")
-from six.moves import http_cookiejar as cl
-
-version="2.27.12"
+version="2.37.0"
os.environ['CURRENT_VERSION_ID']=version
-
if sys.version_info >= (2, 7):
# suppresses default logger. Logging is setup in fanficfare/__init__.py so it works in calibre, too.
rootlogger = logging.getLogger()
diff --git a/setup.py b/setup.py
index 13aa4922..897970ec 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ setup(
name="FanFicFare",
# Versions should comply with PEP440.
- version="2.27.12",
+ version="2.37.0",
description='A tool for downloading fanfiction to eBook formats',
long_description=long_description,
diff --git a/webservice/app.yaml b/webservice/app.yaml
index 8953db00..10b29ede 100644
--- a/webservice/app.yaml
+++ b/webservice/app.yaml
@@ -1,6 +1,6 @@
# ffd-retief-hrd fanficfare
application: fanficfare
-version: 2-27-12
+version: 2-37-0
runtime: python27
api_version: 1
threadsafe: true
From 1ee5c36690dd808dad0584ea4abf2f03ead84a96 Mon Sep 17 00:00:00 2001
From: Jim Miller <retiefjimm@gmail.com>
Date: Tue, 31 Jul 2018 14:38:50 -0500
Subject: [PATCH 024/120] Add absolute_import imports.
---
fanficfare/HtmlTagStack.py | 21 +++++++++++++++++--
.../adapters/adapter_adastrafanficcom.py | 1 +
.../adapters/adapter_adultfanfictionorg.py | 1 +
.../adapters/adapter_alternatehistorycom.py | 1 +
.../adapters/adapter_andromedawebcom.py | 1 +
.../adapters/adapter_archiveofourownorg.py | 1 +
.../adapters/adapter_archiveskyehawkecom.py | 1 +
fanficfare/adapters/adapter_artemisfowlcom.py | 1 +
fanficfare/adapters/adapter_asexstoriescom.py | 1 +
.../adapter_ashwindersycophanthexcom.py | 1 +
.../adapters/adapter_asianfanficscom.py | 1 +
.../adapters/adapter_asr3slashzoneorg.py | 1 +
fanficfare/adapters/adapter_bdsmlibrarycom.py | 1 +
.../adapters/adapter_bloodshedversecom.py | 1 +
.../adapters/adapter_bloodtiesfancom.py | 1 +
fanficfare/adapters/adapter_buffygilescom.py | 1 +
.../adapters/adapter_chaossycophanthexcom.py | 1 +
.../adapters/adapter_chosentwofanficcom.py | 1 +
.../adapters/adapter_csiforensicscom.py | 1 +
fanficfare/adapters/adapter_darksolaceorg.py | 1 +
fanficfare/adapters/adapter_deandamagecom.py | 1 +
.../adapters/adapter_deepinmysoulnet.py | 1 +
.../adapters/adapter_destinysgatewaycom.py | 1 +
fanficfare/adapters/adapter_dokugacom.py | 1 +
.../adapters/adapter_dracoandginnycom.py | 1 +
fanficfare/adapters/adapter_dramioneorg.py | 1 +
.../adapters/adapter_efictionestelielde.py | 1 +
fanficfare/adapters/adapter_efpfanficnet.py | 1 +
.../adapter_erosnsapphosycophanthexcom.py | 1 +
.../adapters/adapter_fanficauthorsnet.py | 1 +
.../adapters/adapter_fanficcastletvnet.py | 1 +
fanficfare/adapters/adapter_fanfichu.py | 1 +
.../adapters/adapter_fanfictionjunkiesde.py | 1 +
fanficfare/adapters/adapter_fanfiktionde.py | 1 +
fanficfare/adapters/adapter_fannation.py | 1 +
fanficfare/adapters/adapter_fhsarchivecom.py | 1 +
fanficfare/adapters/adapter_ficbooknet.py | 1 +
.../adapters/adapter_fictionalleyorg.py | 1 +
fanficfare/adapters/adapter_fictionhuntcom.py | 1 +
fanficfare/adapters/adapter_fictionmaniatv.py | 1 +
fanficfare/adapters/adapter_fictionpadcom.py | 1 +
.../adapters/adapter_fictionpresscom.py | 1 +
fanficfare/adapters/adapter_ficwadcom.py | 1 +
fanficfare/adapters/adapter_fimfictionnet.py | 1 +
fanficfare/adapters/adapter_finestoriescom.py | 1 +
fanficfare/adapters/adapter_fireflyfansnet.py | 1 +
.../adapters/adapter_fireflypopulliorg.py | 1 +
.../adapter_forumquestionablequestingcom.py | 1 +
.../adapter_forumssufficientvelocitycom.py | 1 +
.../adapters/adapter_gluttonyfictioncom.py | 1 +
.../adapters/adapter_gravitytalescom.py | 1 +
.../adapter_harrypotterfanfictioncom.py | 1 +
fanficfare/adapters/adapter_hlfictionnet.py | 1 +
.../adapters/adapter_hpfanficarchivecom.py | 1 +
fanficfare/adapters/adapter_iketernalnet.py | 1 +
fanficfare/adapters/adapter_imagineeficcom.py | 1 +
.../adapters/adapter_imrightbehindyoucom.py | 1 +
fanficfare/adapters/adapter_inkbunnynet.py | 1 +
.../adapters/adapter_itcouldhappennet.py | 1 +
.../adapter_kiarepositorymujajinet.py | 1 +
fanficfare/adapters/adapter_ksarchivecom.py | 1 +
fanficfare/adapters/adapter_lcfanficcom.py | 1 +
.../adapters/adapter_libraryofmoriacom.py | 1 +
.../adapters/adapter_lightnovelgatecom.py | 1 +
fanficfare/adapters/adapter_literotica.py | 1 +
fanficfare/adapters/adapter_looselugscom.py | 1 +
.../adapters/adapter_lotrfanfictioncom.py | 1 +
fanficfare/adapters/adapter_lotrgficcom.py | 1 +
.../adapters/adapter_lumossycophanthexcom.py | 1 +
fanficfare/adapters/adapter_masseffect2in.py | 1 +
fanficfare/adapters/adapter_mcstoriescom.py | 1 +
fanficfare/adapters/adapter_mediaminerorg.py | 1 +
.../adapters/adapter_merlinficdtwinscouk.py | 1 +
.../adapters/adapter_midnightwhispers.py | 1 +
fanficfare/adapters/adapter_mttjustoncenet.py | 1 +
fanficfare/adapters/adapter_naiceanilmenet.py | 1 +
fanficfare/adapters/adapter_narutoficorg.py | 1 +
.../adapters/adapter_nationallibrarynet.py | 1 +
fanficfare/adapters/adapter_ncisficcom.py | 1 +
fanficfare/adapters/adapter_ncisfictioncom.py | 1 +
.../adapters/adapter_nfacommunitycom.py | 1 +
.../adapters/adapter_nhamagicalworldsus.py | 1 +
.../adapters/adapter_ninelivesarchivecom.py | 1 +
.../adapters/adapter_nocturnallightnet.py | 1 +
fanficfare/adapters/adapter_noveltrovecom.py | 1 +
.../adapter_occlumencysycophanthexcom.py | 1 +
fanficfare/adapters/adapter_phoenixsongnet.py | 1 +
.../adapters/adapter_ponyfictionarchivenet.py | 1 +
.../adapters/adapter_potionsandsnitches.py | 1 +
fanficfare/adapters/adapter_potterficscom.py | 1 +
.../adapter_potterheadsanonymouscom.py | 1 +
.../adapters/adapter_pretendercentrecom.py | 1 +
fanficfare/adapters/adapter_qafficcom.py | 1 +
fanficfare/adapters/adapter_quotevcom.py | 1 +
fanficfare/adapters/adapter_royalroadl.py | 1 +
fanficfare/adapters/adapter_samandjacknet.py | 1 +
.../adapters/adapter_scarvesandcoffeenet.py | 1 +
fanficfare/adapters/adapter_sebklainenet.py | 1 +
.../adapters/adapter_sheppardweircom.py | 1 +
.../adapters/adapter_shriftweborgbfa.py | 1 +
.../adapter_sinfuldreamscomunicornfic.py | 1 +
.../adapter_sinfuldreamscomwhisperedmuse.py | 1 +
...adapter_sinfuldreamscomwickedtemptation.py | 1 +
fanficfare/adapters/adapter_siyecouk.py | 1 +
fanficfare/adapters/adapter_spikeluvercom.py | 1 +
fanficfare/adapters/adapter_squidgeorgpeja.py | 1 +
.../adapter_starskyhutcharchivenet.py | 1 +
.../adapters/adapter_starslibrarynet.py | 1 +
.../adapters/adapter_storiesofardacom.py | 1 +
.../adapters/adapter_storiesonlinenet.py | 1 +
fanficfare/adapters/adapter_sugarquillnet.py | 1 +
.../adapter_swordborderlineangelcom.py | 1 +
.../adapter_tasteofpoisoninkubationnet.py | 1 +
.../adapters/adapter_tenhawkpresentscom.py | 1 +
fanficfare/adapters/adapter_test1.py | 1 +
fanficfare/adapters/adapter_tgstorytimecom.py | 1 +
.../adapters/adapter_thebrokenworldorg.py | 1 +
.../adapters/adapter_thedelphicexpansecom.py | 1 +
.../adapters/adapter_thehookupzonenet.py | 1 +
.../adapters/adapter_themaplebookshelf.py | 1 +
fanficfare/adapters/adapter_themasquenet.py | 1 +
.../adapters/adapter_thepetulantpoetesscom.py | 1 +
.../adapters/adapter_thundercatsfansorg.py | 1 +
.../adapters/adapter_tolkienfanfiction.py | 1 +
.../adapters/adapter_tomparisdormcom.py | 1 +
.../adapters/adapter_trekfanfictionnet.py | 1 +
fanficfare/adapters/adapter_trekiverseorg.py | 1 +
fanficfare/adapters/adapter_tthfanficorg.py | 1 +
.../adapters/adapter_twilightarchivescom.py | 1 +
fanficfare/adapters/adapter_twilightednet.py | 1 +
.../adapters/adapter_unknowableroomorg.py | 1 +
.../adapters/adapter_valentchambercom.py | 1 +
.../adapters/adapter_voracity2eficcom.py | 1 +
.../adapters/adapter_walkingtheplankorg.py | 1 +
fanficfare/adapters/adapter_webnovelcom.py | 1 +
fanficfare/adapters/adapter_whoficcom.py | 1 +
.../adapters/adapter_wolverineandroguecom.py | 1 +
fanficfare/adapters/adapter_wraithbaitcom.py | 1 +
.../adapter_writingwhimsicalwanderingsnet.py | 1 +
fanficfare/adapters/adapter_wuxiaworldco.py | 1 +
fanficfare/adapters/adapter_wuxiaworldcom.py | 1 +
fanficfare/adapters/adapter_www13hoursorg.py | 1 +
.../adapters/adapter_wwwaneroticstorycom.py | 1 +
.../adapters/adapter_wwwarea52hkhnet.py | 1 +
.../adapters/adapter_wwwgiantessworldnet.py | 1 +
.../adapters/adapter_wwwlushstoriescom.py | 1 +
fanficfare/adapters/adapter_wwwnovelallcom.py | 1 +
.../adapters/adapter_wwwutopiastoriescom.py | 1 +
fanficfare/dateutils.py | 19 +++++++++++++++++
fanficfare/exceptions.py | 4 +++-
fanficfare/mobihtml.py | 2 ++
fanficfare/translit.py | 2 ++
152 files changed, 192 insertions(+), 3 deletions(-)
diff --git a/fanficfare/HtmlTagStack.py b/fanficfare/HtmlTagStack.py
index 3a9e703a..64a7953b 100644
--- a/fanficfare/HtmlTagStack.py
+++ b/fanficfare/HtmlTagStack.py
@@ -1,4 +1,21 @@
-# coding: utf-8
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 FanFicFare team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import absolute_import
import re
import codecs
@@ -54,4 +71,4 @@ def flush():
del stack[:]
def get_stack():
- return stack
\ No newline at end of file
+ return stack
diff --git a/fanficfare/adapters/adapter_adastrafanficcom.py b/fanficfare/adapters/adapter_adastrafanficcom.py
index e679cda5..b4b9b56d 100644
--- a/fanficfare/adapters/adapter_adastrafanficcom.py
+++ b/fanficfare/adapters/adapter_adastrafanficcom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_adultfanfictionorg.py b/fanficfare/adapters/adapter_adultfanfictionorg.py
index aabd1b20..280bd497 100644
--- a/fanficfare/adapters/adapter_adultfanfictionorg.py
+++ b/fanficfare/adapters/adapter_adultfanfictionorg.py
@@ -17,6 +17,7 @@
################################################################################
### Written by GComyn
################################################################################
+from __future__ import absolute_import
from __future__ import unicode_literals
import time
import logging
diff --git a/fanficfare/adapters/adapter_alternatehistorycom.py b/fanficfare/adapters/adapter_alternatehistorycom.py
index 339e0afd..f7824377 100644
--- a/fanficfare/adapters/adapter_alternatehistorycom.py
+++ b/fanficfare/adapters/adapter_alternatehistorycom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
from adapter_forumquestionablequestingcom import QuestionablequestingComAdapter
def getClass():
diff --git a/fanficfare/adapters/adapter_andromedawebcom.py b/fanficfare/adapters/adapter_andromedawebcom.py
index 46d793d3..5cfc827f 100644
--- a/fanficfare/adapters/adapter_andromedawebcom.py
+++ b/fanficfare/adapters/adapter_andromedawebcom.py
@@ -18,6 +18,7 @@
# ####### webpage.
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_archiveofourownorg.py b/fanficfare/adapters/adapter_archiveofourownorg.py
index a5b46b18..e131258f 100644
--- a/fanficfare/adapters/adapter_archiveofourownorg.py
+++ b/fanficfare/adapters/adapter_archiveofourownorg.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_archiveskyehawkecom.py b/fanficfare/adapters/adapter_archiveskyehawkecom.py
index ea66db2f..f324e4c6 100644
--- a/fanficfare/adapters/adapter_archiveskyehawkecom.py
+++ b/fanficfare/adapters/adapter_archiveskyehawkecom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_artemisfowlcom.py b/fanficfare/adapters/adapter_artemisfowlcom.py
index c3c2da67..d25257dc 100644
--- a/fanficfare/adapters/adapter_artemisfowlcom.py
+++ b/fanficfare/adapters/adapter_artemisfowlcom.py
@@ -18,6 +18,7 @@
# ####### webpage.
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_asexstoriescom.py b/fanficfare/adapters/adapter_asexstoriescom.py
index b965cc32..5e8ef953 100644
--- a/fanficfare/adapters/adapter_asexstoriescom.py
+++ b/fanficfare/adapters/adapter_asexstoriescom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_ashwindersycophanthexcom.py b/fanficfare/adapters/adapter_ashwindersycophanthexcom.py
index 986977ad..cf42a680 100644
--- a/fanficfare/adapters/adapter_ashwindersycophanthexcom.py
+++ b/fanficfare/adapters/adapter_ashwindersycophanthexcom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_asianfanficscom.py b/fanficfare/adapters/adapter_asianfanficscom.py
index 3fc39a87..69f98cd1 100644
--- a/fanficfare/adapters/adapter_asianfanficscom.py
+++ b/fanficfare/adapters/adapter_asianfanficscom.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_asr3slashzoneorg.py b/fanficfare/adapters/adapter_asr3slashzoneorg.py
index 0faeeff6..c519af29 100644
--- a/fanficfare/adapters/adapter_asr3slashzoneorg.py
+++ b/fanficfare/adapters/adapter_asr3slashzoneorg.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_bdsmlibrarycom.py b/fanficfare/adapters/adapter_bdsmlibrarycom.py
index ede7e94f..a45d8324 100644
--- a/fanficfare/adapters/adapter_bdsmlibrarycom.py
+++ b/fanficfare/adapters/adapter_bdsmlibrarycom.py
@@ -23,6 +23,7 @@
### Fixed the removal of the extra tags from some of the stories and
### removed the attributes from the paragraph and span tags
###########################################################################
+from __future__ import absolute_import
'''
This works, but some of the stories have abysmal formatting, so it would
probably need to be edited for reading.
diff --git a/fanficfare/adapters/adapter_bloodshedversecom.py b/fanficfare/adapters/adapter_bloodshedversecom.py
index 7c561274..b040eba6 100644
--- a/fanficfare/adapters/adapter_bloodshedversecom.py
+++ b/fanficfare/adapters/adapter_bloodshedversecom.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
from datetime import timedelta
import re
import urllib2
diff --git a/fanficfare/adapters/adapter_bloodtiesfancom.py b/fanficfare/adapters/adapter_bloodtiesfancom.py
index 31484159..533b781f 100644
--- a/fanficfare/adapters/adapter_bloodtiesfancom.py
+++ b/fanficfare/adapters/adapter_bloodtiesfancom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_buffygilescom.py b/fanficfare/adapters/adapter_buffygilescom.py
index 60727150..54669ce1 100644
--- a/fanficfare/adapters/adapter_buffygilescom.py
+++ b/fanficfare/adapters/adapter_buffygilescom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_chaossycophanthexcom.py b/fanficfare/adapters/adapter_chaossycophanthexcom.py
index c77e23b6..8a0a1630 100644
--- a/fanficfare/adapters/adapter_chaossycophanthexcom.py
+++ b/fanficfare/adapters/adapter_chaossycophanthexcom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_chosentwofanficcom.py b/fanficfare/adapters/adapter_chosentwofanficcom.py
index d292b0f4..2e0e57b3 100644
--- a/fanficfare/adapters/adapter_chosentwofanficcom.py
+++ b/fanficfare/adapters/adapter_chosentwofanficcom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_csiforensicscom.py b/fanficfare/adapters/adapter_csiforensicscom.py
index cba47bca..1c1d7c80 100644
--- a/fanficfare/adapters/adapter_csiforensicscom.py
+++ b/fanficfare/adapters/adapter_csiforensicscom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_darksolaceorg.py b/fanficfare/adapters/adapter_darksolaceorg.py
index 925caa4a..fd31765c 100644
--- a/fanficfare/adapters/adapter_darksolaceorg.py
+++ b/fanficfare/adapters/adapter_darksolaceorg.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
diff --git a/fanficfare/adapters/adapter_deandamagecom.py b/fanficfare/adapters/adapter_deandamagecom.py
index 27a83e6e..7c3780a1 100644
--- a/fanficfare/adapters/adapter_deandamagecom.py
+++ b/fanficfare/adapters/adapter_deandamagecom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
diff --git a/fanficfare/adapters/adapter_deepinmysoulnet.py b/fanficfare/adapters/adapter_deepinmysoulnet.py
index a25a6e25..ad76e765 100644
--- a/fanficfare/adapters/adapter_deepinmysoulnet.py
+++ b/fanficfare/adapters/adapter_deepinmysoulnet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_destinysgatewaycom.py b/fanficfare/adapters/adapter_destinysgatewaycom.py
index 28bde864..d93b772c 100644
--- a/fanficfare/adapters/adapter_destinysgatewaycom.py
+++ b/fanficfare/adapters/adapter_destinysgatewaycom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_dokugacom.py b/fanficfare/adapters/adapter_dokugacom.py
index 80913b88..0a41ad39 100644
--- a/fanficfare/adapters/adapter_dokugacom.py
+++ b/fanficfare/adapters/adapter_dokugacom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_dracoandginnycom.py b/fanficfare/adapters/adapter_dracoandginnycom.py
index 436b09ab..bd98e13f 100644
--- a/fanficfare/adapters/adapter_dracoandginnycom.py
+++ b/fanficfare/adapters/adapter_dracoandginnycom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_dramioneorg.py b/fanficfare/adapters/adapter_dramioneorg.py
index 856f928b..32152863 100644
--- a/fanficfare/adapters/adapter_dramioneorg.py
+++ b/fanficfare/adapters/adapter_dramioneorg.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_efictionestelielde.py b/fanficfare/adapters/adapter_efictionestelielde.py
index 233eda25..9c481e88 100644
--- a/fanficfare/adapters/adapter_efictionestelielde.py
+++ b/fanficfare/adapters/adapter_efictionestelielde.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_efpfanficnet.py b/fanficfare/adapters/adapter_efpfanficnet.py
index 52cd53b3..959219e9 100644
--- a/fanficfare/adapters/adapter_efpfanficnet.py
+++ b/fanficfare/adapters/adapter_efpfanficnet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py b/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py
index fd5789ae..ce47a784 100644
--- a/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py
+++ b/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_fanficauthorsnet.py b/fanficfare/adapters/adapter_fanficauthorsnet.py
index a18b925f..7d583559 100644
--- a/fanficfare/adapters/adapter_fanficauthorsnet.py
+++ b/fanficfare/adapters/adapter_fanficauthorsnet.py
@@ -18,6 +18,7 @@
### Adapted by GComyn - November 26, 2016
###
####################################################################################################
+from __future__ import absolute_import
from __future__ import unicode_literals
import time
import logging
diff --git a/fanficfare/adapters/adapter_fanficcastletvnet.py b/fanficfare/adapters/adapter_fanficcastletvnet.py
index ee186272..15e1e219 100644
--- a/fanficfare/adapters/adapter_fanficcastletvnet.py
+++ b/fanficfare/adapters/adapter_fanficcastletvnet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_fanfichu.py b/fanficfare/adapters/adapter_fanfichu.py
index 7685a6d4..59f702d7 100644
--- a/fanficfare/adapters/adapter_fanfichu.py
+++ b/fanficfare/adapters/adapter_fanfichu.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import re
import urllib2
import urlparse
diff --git a/fanficfare/adapters/adapter_fanfictionjunkiesde.py b/fanficfare/adapters/adapter_fanfictionjunkiesde.py
index c2418b47..6c436945 100644
--- a/fanficfare/adapters/adapter_fanfictionjunkiesde.py
+++ b/fanficfare/adapters/adapter_fanfictionjunkiesde.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_fanfiktionde.py b/fanficfare/adapters/adapter_fanfiktionde.py
index 70516081..ad59a421 100644
--- a/fanficfare/adapters/adapter_fanfiktionde.py
+++ b/fanficfare/adapters/adapter_fanfiktionde.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_fannation.py b/fanficfare/adapters/adapter_fannation.py
index 78a053ca..7a06bb26 100644
--- a/fanficfare/adapters/adapter_fannation.py
+++ b/fanficfare/adapters/adapter_fannation.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import re
from base_efiction_adapter import BaseEfictionAdapter
diff --git a/fanficfare/adapters/adapter_fhsarchivecom.py b/fanficfare/adapters/adapter_fhsarchivecom.py
index d2f9ff3d..27479da4 100644
--- a/fanficfare/adapters/adapter_fhsarchivecom.py
+++ b/fanficfare/adapters/adapter_fhsarchivecom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import re
from base_efiction_adapter import BaseEfictionAdapter
diff --git a/fanficfare/adapters/adapter_ficbooknet.py b/fanficfare/adapters/adapter_ficbooknet.py
index 14304ab7..dcfd3c33 100644
--- a/fanficfare/adapters/adapter_ficbooknet.py
+++ b/fanficfare/adapters/adapter_ficbooknet.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import datetime
import logging
diff --git a/fanficfare/adapters/adapter_fictionalleyorg.py b/fanficfare/adapters/adapter_fictionalleyorg.py
index bdbf323d..b7e53b0e 100644
--- a/fanficfare/adapters/adapter_fictionalleyorg.py
+++ b/fanficfare/adapters/adapter_fictionalleyorg.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_fictionhuntcom.py b/fanficfare/adapters/adapter_fictionhuntcom.py
index e3ae1bef..17ed001e 100644
--- a/fanficfare/adapters/adapter_fictionhuntcom.py
+++ b/fanficfare/adapters/adapter_fictionhuntcom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_fictionmaniatv.py b/fanficfare/adapters/adapter_fictionmaniatv.py
index d6de271c..2506e8a3 100644
--- a/fanficfare/adapters/adapter_fictionmaniatv.py
+++ b/fanficfare/adapters/adapter_fictionmaniatv.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import re
import urllib2
import urlparse
diff --git a/fanficfare/adapters/adapter_fictionpadcom.py b/fanficfare/adapters/adapter_fictionpadcom.py
index 2881bfe6..4440356c 100644
--- a/fanficfare/adapters/adapter_fictionpadcom.py
+++ b/fanficfare/adapters/adapter_fictionpadcom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_fictionpresscom.py b/fanficfare/adapters/adapter_fictionpresscom.py
index 19882975..bfd667f9 100644
--- a/fanficfare/adapters/adapter_fictionpresscom.py
+++ b/fanficfare/adapters/adapter_fictionpresscom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_ficwadcom.py b/fanficfare/adapters/adapter_ficwadcom.py
index 2f2fc834..7991b7a8 100644
--- a/fanficfare/adapters/adapter_ficwadcom.py
+++ b/fanficfare/adapters/adapter_ficwadcom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_fimfictionnet.py b/fanficfare/adapters/adapter_fimfictionnet.py
index 42f7233b..03f0c9ec 100644
--- a/fanficfare/adapters/adapter_fimfictionnet.py
+++ b/fanficfare/adapters/adapter_fimfictionnet.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
from datetime import date, datetime
import logging
diff --git a/fanficfare/adapters/adapter_finestoriescom.py b/fanficfare/adapters/adapter_finestoriescom.py
index 59d47e6c..2c66b756 100644
--- a/fanficfare/adapters/adapter_finestoriescom.py
+++ b/fanficfare/adapters/adapter_finestoriescom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
from adapter_storiesonlinenet import StoriesOnlineNetAdapter
diff --git a/fanficfare/adapters/adapter_fireflyfansnet.py b/fanficfare/adapters/adapter_fireflyfansnet.py
index 005884f2..5027222b 100644
--- a/fanficfare/adapters/adapter_fireflyfansnet.py
+++ b/fanficfare/adapters/adapter_fireflyfansnet.py
@@ -17,6 +17,7 @@
####################################################################################################
# Adapted by GComyn - December 10, 2016
####################################################################################################
+from __future__ import absolute_import
''' This adapter will download the stories from the www.fireflyfans.net forum pages '''
import logging
import re
diff --git a/fanficfare/adapters/adapter_fireflypopulliorg.py b/fanficfare/adapters/adapter_fireflypopulliorg.py
index 028862c9..97f64aa1 100644
--- a/fanficfare/adapters/adapter_fireflypopulliorg.py
+++ b/fanficfare/adapters/adapter_fireflypopulliorg.py
@@ -21,6 +21,7 @@
###=================================================================================================
### I have started to use lines of # on the line just before a function so they are easier to find.
####################################################################################################
+from __future__ import absolute_import
''' This adapter scrapes the metadata and chapter text from stories on firefly.populli.org '''
import logging
import re
diff --git a/fanficfare/adapters/adapter_forumquestionablequestingcom.py b/fanficfare/adapters/adapter_forumquestionablequestingcom.py
index 6c07d4cb..bfeab8cf 100644
--- a/fanficfare/adapters/adapter_forumquestionablequestingcom.py
+++ b/fanficfare/adapters/adapter_forumquestionablequestingcom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import re
from ..htmlcleanup import stripHTML
diff --git a/fanficfare/adapters/adapter_forumssufficientvelocitycom.py b/fanficfare/adapters/adapter_forumssufficientvelocitycom.py
index 679f9df1..3222d1b0 100644
--- a/fanficfare/adapters/adapter_forumssufficientvelocitycom.py
+++ b/fanficfare/adapters/adapter_forumssufficientvelocitycom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import re
from base_xenforoforum_adapter import BaseXenForoForumAdapter
diff --git a/fanficfare/adapters/adapter_gluttonyfictioncom.py b/fanficfare/adapters/adapter_gluttonyfictioncom.py
index eef9410a..306abf92 100644
--- a/fanficfare/adapters/adapter_gluttonyfictioncom.py
+++ b/fanficfare/adapters/adapter_gluttonyfictioncom.py
@@ -21,6 +21,7 @@
### Rewritten by: GComyn on November, 06, 2016
### Original was adapter_fannation.py
##################################################################################
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class GluttonyFictionComAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_gravitytalescom.py b/fanficfare/adapters/adapter_gravitytalescom.py
index 97183427..9391cf8d 100644
--- a/fanficfare/adapters/adapter_gravitytalescom.py
+++ b/fanficfare/adapters/adapter_gravitytalescom.py
@@ -18,6 +18,7 @@
## Adapted by GComyn on April 21, 2017
####################################################################################################
+from __future__ import absolute_import
import logging
import re
import time
diff --git a/fanficfare/adapters/adapter_harrypotterfanfictioncom.py b/fanficfare/adapters/adapter_harrypotterfanfictioncom.py
index 8fe607be..3b1d172f 100644
--- a/fanficfare/adapters/adapter_harrypotterfanfictioncom.py
+++ b/fanficfare/adapters/adapter_harrypotterfanfictioncom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_hlfictionnet.py b/fanficfare/adapters/adapter_hlfictionnet.py
index 92686186..52adf5e9 100644
--- a/fanficfare/adapters/adapter_hlfictionnet.py
+++ b/fanficfare/adapters/adapter_hlfictionnet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_hpfanficarchivecom.py b/fanficfare/adapters/adapter_hpfanficarchivecom.py
index 1cebac01..71bd805f 100644
--- a/fanficfare/adapters/adapter_hpfanficarchivecom.py
+++ b/fanficfare/adapters/adapter_hpfanficarchivecom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_iketernalnet.py b/fanficfare/adapters/adapter_iketernalnet.py
index a0e5bd13..e2ff31d7 100644
--- a/fanficfare/adapters/adapter_iketernalnet.py
+++ b/fanficfare/adapters/adapter_iketernalnet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_imagineeficcom.py b/fanficfare/adapters/adapter_imagineeficcom.py
index 32ecf81e..de9aae23 100644
--- a/fanficfare/adapters/adapter_imagineeficcom.py
+++ b/fanficfare/adapters/adapter_imagineeficcom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_imrightbehindyoucom.py b/fanficfare/adapters/adapter_imrightbehindyoucom.py
index f07f3a83..349c1d8b 100644
--- a/fanficfare/adapters/adapter_imrightbehindyoucom.py
+++ b/fanficfare/adapters/adapter_imrightbehindyoucom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
diff --git a/fanficfare/adapters/adapter_inkbunnynet.py b/fanficfare/adapters/adapter_inkbunnynet.py
index d8a15eff..ce30351e 100644
--- a/fanficfare/adapters/adapter_inkbunnynet.py
+++ b/fanficfare/adapters/adapter_inkbunnynet.py
@@ -18,6 +18,7 @@
# Adapted by GComyn on April 24, 2017
# Updated by GComyn on June 11, 2018
+from __future__ import absolute_import
import logging
import re
import sys
diff --git a/fanficfare/adapters/adapter_itcouldhappennet.py b/fanficfare/adapters/adapter_itcouldhappennet.py
index 7e6329c5..c8860e85 100644
--- a/fanficfare/adapters/adapter_itcouldhappennet.py
+++ b/fanficfare/adapters/adapter_itcouldhappennet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import re
from base_efiction_adapter import BaseEfictionAdapter
diff --git a/fanficfare/adapters/adapter_kiarepositorymujajinet.py b/fanficfare/adapters/adapter_kiarepositorymujajinet.py
index df6f06b9..568e63d2 100644
--- a/fanficfare/adapters/adapter_kiarepositorymujajinet.py
+++ b/fanficfare/adapters/adapter_kiarepositorymujajinet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_ksarchivecom.py b/fanficfare/adapters/adapter_ksarchivecom.py
index ad7dbe80..e0897a15 100644
--- a/fanficfare/adapters/adapter_ksarchivecom.py
+++ b/fanficfare/adapters/adapter_ksarchivecom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_lcfanficcom.py b/fanficfare/adapters/adapter_lcfanficcom.py
index 14155e5a..412c402e 100644
--- a/fanficfare/adapters/adapter_lcfanficcom.py
+++ b/fanficfare/adapters/adapter_lcfanficcom.py
@@ -18,6 +18,7 @@
## Adapted by GComyn on April 22, 2017
####################################################################################################
+from __future__ import absolute_import
import logging
import json
import re
diff --git a/fanficfare/adapters/adapter_libraryofmoriacom.py b/fanficfare/adapters/adapter_libraryofmoriacom.py
index 08196cfb..21e0ee55 100644
--- a/fanficfare/adapters/adapter_libraryofmoriacom.py
+++ b/fanficfare/adapters/adapter_libraryofmoriacom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class LibraryOfMoriaComAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_lightnovelgatecom.py b/fanficfare/adapters/adapter_lightnovelgatecom.py
index fe79e103..4489beae 100644
--- a/fanficfare/adapters/adapter_lightnovelgatecom.py
+++ b/fanficfare/adapters/adapter_lightnovelgatecom.py
@@ -19,6 +19,7 @@
### Tested with Calibre
####################################################################################################
+from __future__ import absolute_import
import logging
import re
import urllib2
diff --git a/fanficfare/adapters/adapter_literotica.py b/fanficfare/adapters/adapter_literotica.py
index 8446ab84..b2269877 100644
--- a/fanficfare/adapters/adapter_literotica.py
+++ b/fanficfare/adapters/adapter_literotica.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_looselugscom.py b/fanficfare/adapters/adapter_looselugscom.py
index 20b93af2..cd8b3302 100644
--- a/fanficfare/adapters/adapter_looselugscom.py
+++ b/fanficfare/adapters/adapter_looselugscom.py
@@ -21,6 +21,7 @@
### Rewritten by: GComyn on November, 06, 2016
### Original was adapter_fannation.py
##################################################################################
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class LooseLugsComAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_lotrfanfictioncom.py b/fanficfare/adapters/adapter_lotrfanfictioncom.py
index ad4f9c33..e9f04dc3 100644
--- a/fanficfare/adapters/adapter_lotrfanfictioncom.py
+++ b/fanficfare/adapters/adapter_lotrfanfictioncom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class TheLOTRFanFictionSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_lotrgficcom.py b/fanficfare/adapters/adapter_lotrgficcom.py
index 6fb6392d..f1cc006c 100644
--- a/fanficfare/adapters/adapter_lotrgficcom.py
+++ b/fanficfare/adapters/adapter_lotrgficcom.py
@@ -18,6 +18,7 @@
### Adapted by GComyn
### Completed on November, 22, 2016
##############################################################################
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_lumossycophanthexcom.py b/fanficfare/adapters/adapter_lumossycophanthexcom.py
index f87410be..4b353c39 100644
--- a/fanficfare/adapters/adapter_lumossycophanthexcom.py
+++ b/fanficfare/adapters/adapter_lumossycophanthexcom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_masseffect2in.py b/fanficfare/adapters/adapter_masseffect2in.py
index 2fb26965..338fe488 100644
--- a/fanficfare/adapters/adapter_masseffect2in.py
+++ b/fanficfare/adapters/adapter_masseffect2in.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import bs4
import datetime
import logging
diff --git a/fanficfare/adapters/adapter_mcstoriescom.py b/fanficfare/adapters/adapter_mcstoriescom.py
index 93572bf2..332cc5e3 100644
--- a/fanficfare/adapters/adapter_mcstoriescom.py
+++ b/fanficfare/adapters/adapter_mcstoriescom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_mediaminerorg.py b/fanficfare/adapters/adapter_mediaminerorg.py
index 2bfb4bdc..ac5eb45e 100644
--- a/fanficfare/adapters/adapter_mediaminerorg.py
+++ b/fanficfare/adapters/adapter_mediaminerorg.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_merlinficdtwinscouk.py b/fanficfare/adapters/adapter_merlinficdtwinscouk.py
index b34cdc40..41671955 100644
--- a/fanficfare/adapters/adapter_merlinficdtwinscouk.py
+++ b/fanficfare/adapters/adapter_merlinficdtwinscouk.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_midnightwhispers.py b/fanficfare/adapters/adapter_midnightwhispers.py
index fa889f8a..86d5a6cf 100644
--- a/fanficfare/adapters/adapter_midnightwhispers.py
+++ b/fanficfare/adapters/adapter_midnightwhispers.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_mttjustoncenet.py b/fanficfare/adapters/adapter_mttjustoncenet.py
index ad8c09ad..097b6a04 100644
--- a/fanficfare/adapters/adapter_mttjustoncenet.py
+++ b/fanficfare/adapters/adapter_mttjustoncenet.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
diff --git a/fanficfare/adapters/adapter_naiceanilmenet.py b/fanficfare/adapters/adapter_naiceanilmenet.py
index 3838f06f..a7a913fd 100644
--- a/fanficfare/adapters/adapter_naiceanilmenet.py
+++ b/fanficfare/adapters/adapter_naiceanilmenet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class NaiceaNilmeNetAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_narutoficorg.py b/fanficfare/adapters/adapter_narutoficorg.py
index 35e3bb86..09441d5f 100644
--- a/fanficfare/adapters/adapter_narutoficorg.py
+++ b/fanficfare/adapters/adapter_narutoficorg.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
diff --git a/fanficfare/adapters/adapter_nationallibrarynet.py b/fanficfare/adapters/adapter_nationallibrarynet.py
index bdea9c85..fc8d8655 100644
--- a/fanficfare/adapters/adapter_nationallibrarynet.py
+++ b/fanficfare/adapters/adapter_nationallibrarynet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_ncisficcom.py b/fanficfare/adapters/adapter_ncisficcom.py
index 79ceccce..6fa3f33e 100644
--- a/fanficfare/adapters/adapter_ncisficcom.py
+++ b/fanficfare/adapters/adapter_ncisficcom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_ncisfictioncom.py b/fanficfare/adapters/adapter_ncisfictioncom.py
index 974adbb7..8b989449 100644
--- a/fanficfare/adapters/adapter_ncisfictioncom.py
+++ b/fanficfare/adapters/adapter_ncisfictioncom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class NCISFictionComAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_nfacommunitycom.py b/fanficfare/adapters/adapter_nfacommunitycom.py
index 754948a6..d1650176 100644
--- a/fanficfare/adapters/adapter_nfacommunitycom.py
+++ b/fanficfare/adapters/adapter_nfacommunitycom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_nhamagicalworldsus.py b/fanficfare/adapters/adapter_nhamagicalworldsus.py
index bb78b5ac..214dc371 100644
--- a/fanficfare/adapters/adapter_nhamagicalworldsus.py
+++ b/fanficfare/adapters/adapter_nhamagicalworldsus.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
# Class name has to be unique. Our convention is camel case the
diff --git a/fanficfare/adapters/adapter_ninelivesarchivecom.py b/fanficfare/adapters/adapter_ninelivesarchivecom.py
index 08c5c08d..13e052cc 100644
--- a/fanficfare/adapters/adapter_ninelivesarchivecom.py
+++ b/fanficfare/adapters/adapter_ninelivesarchivecom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import re
from base_efiction_adapter import BaseEfictionAdapter
diff --git a/fanficfare/adapters/adapter_nocturnallightnet.py b/fanficfare/adapters/adapter_nocturnallightnet.py
index db86d4cd..95edc0a3 100644
--- a/fanficfare/adapters/adapter_nocturnallightnet.py
+++ b/fanficfare/adapters/adapter_nocturnallightnet.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
+from __future__ import absolute_import
import re
import urllib2
import urlparse
diff --git a/fanficfare/adapters/adapter_noveltrovecom.py b/fanficfare/adapters/adapter_noveltrovecom.py
index 24ddf5c8..c7907439 100644
--- a/fanficfare/adapters/adapter_noveltrovecom.py
+++ b/fanficfare/adapters/adapter_noveltrovecom.py
@@ -18,6 +18,7 @@
## Adapted by GComyn on April 22, 2017
####################################################################################################
+from __future__ import absolute_import
import logging
import json
import re
diff --git a/fanficfare/adapters/adapter_occlumencysycophanthexcom.py b/fanficfare/adapters/adapter_occlumencysycophanthexcom.py
index ce49c8d5..01ff9228 100644
--- a/fanficfare/adapters/adapter_occlumencysycophanthexcom.py
+++ b/fanficfare/adapters/adapter_occlumencysycophanthexcom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_phoenixsongnet.py b/fanficfare/adapters/adapter_phoenixsongnet.py
index 6994cc38..05197f29 100644
--- a/fanficfare/adapters/adapter_phoenixsongnet.py
+++ b/fanficfare/adapters/adapter_phoenixsongnet.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_ponyfictionarchivenet.py b/fanficfare/adapters/adapter_ponyfictionarchivenet.py
index 54775eff..ad2ad143 100644
--- a/fanficfare/adapters/adapter_ponyfictionarchivenet.py
+++ b/fanficfare/adapters/adapter_ponyfictionarchivenet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_potionsandsnitches.py b/fanficfare/adapters/adapter_potionsandsnitches.py
index d25212a6..7b9ec32f 100644
--- a/fanficfare/adapters/adapter_potionsandsnitches.py
+++ b/fanficfare/adapters/adapter_potionsandsnitches.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_potterficscom.py b/fanficfare/adapters/adapter_potterficscom.py
index e3f789e4..654ee548 100644
--- a/fanficfare/adapters/adapter_potterficscom.py
+++ b/fanficfare/adapters/adapter_potterficscom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import datetime
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_potterheadsanonymouscom.py b/fanficfare/adapters/adapter_potterheadsanonymouscom.py
index 4184e19d..bf806bec 100644
--- a/fanficfare/adapters/adapter_potterheadsanonymouscom.py
+++ b/fanficfare/adapters/adapter_potterheadsanonymouscom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_pretendercentrecom.py b/fanficfare/adapters/adapter_pretendercentrecom.py
index 0d644c34..76c42127 100644
--- a/fanficfare/adapters/adapter_pretendercentrecom.py
+++ b/fanficfare/adapters/adapter_pretendercentrecom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_qafficcom.py b/fanficfare/adapters/adapter_qafficcom.py
index 9d5d252f..5fba91f0 100644
--- a/fanficfare/adapters/adapter_qafficcom.py
+++ b/fanficfare/adapters/adapter_qafficcom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_quotevcom.py b/fanficfare/adapters/adapter_quotevcom.py
index 282ba283..6fd27b63 100644
--- a/fanficfare/adapters/adapter_quotevcom.py
+++ b/fanficfare/adapters/adapter_quotevcom.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
+from __future__ import absolute_import
import re
import urlparse
import urllib2
diff --git a/fanficfare/adapters/adapter_royalroadl.py b/fanficfare/adapters/adapter_royalroadl.py
index 0b5defd2..5afd0747 100644
--- a/fanficfare/adapters/adapter_royalroadl.py
+++ b/fanficfare/adapters/adapter_royalroadl.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import contextlib
from datetime import datetime
import httplib
diff --git a/fanficfare/adapters/adapter_samandjacknet.py b/fanficfare/adapters/adapter_samandjacknet.py
index 8304bafb..aedf5050 100644
--- a/fanficfare/adapters/adapter_samandjacknet.py
+++ b/fanficfare/adapters/adapter_samandjacknet.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_scarvesandcoffeenet.py b/fanficfare/adapters/adapter_scarvesandcoffeenet.py
index b2b1d9c1..be3075d7 100644
--- a/fanficfare/adapters/adapter_scarvesandcoffeenet.py
+++ b/fanficfare/adapters/adapter_scarvesandcoffeenet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_sebklainenet.py b/fanficfare/adapters/adapter_sebklainenet.py
index 5862e231..8cbb8baf 100644
--- a/fanficfare/adapters/adapter_sebklainenet.py
+++ b/fanficfare/adapters/adapter_sebklainenet.py
@@ -19,6 +19,7 @@
###################################################################################################
# Software: eFiction
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class SebklaineNeteOrgSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_sheppardweircom.py b/fanficfare/adapters/adapter_sheppardweircom.py
index 6b1fa76a..578d8035 100644
--- a/fanficfare/adapters/adapter_sheppardweircom.py
+++ b/fanficfare/adapters/adapter_sheppardweircom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_shriftweborgbfa.py b/fanficfare/adapters/adapter_shriftweborgbfa.py
index b1fbaa4f..6d53b733 100644
--- a/fanficfare/adapters/adapter_shriftweborgbfa.py
+++ b/fanficfare/adapters/adapter_shriftweborgbfa.py
@@ -21,6 +21,7 @@
###=================================================================================================
### I have started to use lines of # on the line just before a function so they are easier to find.
####################################################################################################
+from __future__ import absolute_import
''' This adapter scrapes the metadata and chapter text from stories on archive.shriftweb.org '''
import logging
import re
diff --git a/fanficfare/adapters/adapter_sinfuldreamscomunicornfic.py b/fanficfare/adapters/adapter_sinfuldreamscomunicornfic.py
index 3fd9d1df..9cd84ad9 100644
--- a/fanficfare/adapters/adapter_sinfuldreamscomunicornfic.py
+++ b/fanficfare/adapters/adapter_sinfuldreamscomunicornfic.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class SinfulDreamsComUnicornFic(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_sinfuldreamscomwhisperedmuse.py b/fanficfare/adapters/adapter_sinfuldreamscomwhisperedmuse.py
index 5082fe61..143415ee 100644
--- a/fanficfare/adapters/adapter_sinfuldreamscomwhisperedmuse.py
+++ b/fanficfare/adapters/adapter_sinfuldreamscomwhisperedmuse.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class SinfulDreamsComWhisperedMuse(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_sinfuldreamscomwickedtemptation.py b/fanficfare/adapters/adapter_sinfuldreamscomwickedtemptation.py
index cebc3020..ca3bffb7 100644
--- a/fanficfare/adapters/adapter_sinfuldreamscomwickedtemptation.py
+++ b/fanficfare/adapters/adapter_sinfuldreamscomwickedtemptation.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class SinfulDreamsComWickedTemptation(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_siyecouk.py b/fanficfare/adapters/adapter_siyecouk.py
index 15628ff9..9c2b808e 100644
--- a/fanficfare/adapters/adapter_siyecouk.py
+++ b/fanficfare/adapters/adapter_siyecouk.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_spikeluvercom.py b/fanficfare/adapters/adapter_spikeluvercom.py
index adf8d2eb..c68339ae 100644
--- a/fanficfare/adapters/adapter_spikeluvercom.py
+++ b/fanficfare/adapters/adapter_spikeluvercom.py
@@ -1,4 +1,5 @@
# Software: eFiction
+from __future__ import absolute_import
import re
import urllib2
import urlparse
diff --git a/fanficfare/adapters/adapter_squidgeorgpeja.py b/fanficfare/adapters/adapter_squidgeorgpeja.py
index c49acaf2..2e8e84be 100644
--- a/fanficfare/adapters/adapter_squidgeorgpeja.py
+++ b/fanficfare/adapters/adapter_squidgeorgpeja.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_starskyhutcharchivenet.py b/fanficfare/adapters/adapter_starskyhutcharchivenet.py
index fe816265..f69194fc 100644
--- a/fanficfare/adapters/adapter_starskyhutcharchivenet.py
+++ b/fanficfare/adapters/adapter_starskyhutcharchivenet.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
diff --git a/fanficfare/adapters/adapter_starslibrarynet.py b/fanficfare/adapters/adapter_starslibrarynet.py
index 678a2ccd..30c7ec78 100644
--- a/fanficfare/adapters/adapter_starslibrarynet.py
+++ b/fanficfare/adapters/adapter_starslibrarynet.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
diff --git a/fanficfare/adapters/adapter_storiesofardacom.py b/fanficfare/adapters/adapter_storiesofardacom.py
index e31def73..388191ee 100644
--- a/fanficfare/adapters/adapter_storiesofardacom.py
+++ b/fanficfare/adapters/adapter_storiesofardacom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_storiesonlinenet.py b/fanficfare/adapters/adapter_storiesonlinenet.py
index 2f224769..b770f282 100644
--- a/fanficfare/adapters/adapter_storiesonlinenet.py
+++ b/fanficfare/adapters/adapter_storiesonlinenet.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_sugarquillnet.py b/fanficfare/adapters/adapter_sugarquillnet.py
index 65c43093..a9160b32 100644
--- a/fanficfare/adapters/adapter_sugarquillnet.py
+++ b/fanficfare/adapters/adapter_sugarquillnet.py
@@ -26,6 +26,7 @@
### take a long gime to process. I've removed as much of the extra
### formatting as I thought I could.
#############################################################################
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_swordborderlineangelcom.py b/fanficfare/adapters/adapter_swordborderlineangelcom.py
index 5f94da25..41d7a3c3 100644
--- a/fanficfare/adapters/adapter_swordborderlineangelcom.py
+++ b/fanficfare/adapters/adapter_swordborderlineangelcom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
from ..htmlcleanup import stripHTML
from base_efiction_adapter import BaseEfictionAdapter
diff --git a/fanficfare/adapters/adapter_tasteofpoisoninkubationnet.py b/fanficfare/adapters/adapter_tasteofpoisoninkubationnet.py
index 94b2f19c..14a3b020 100644
--- a/fanficfare/adapters/adapter_tasteofpoisoninkubationnet.py
+++ b/fanficfare/adapters/adapter_tasteofpoisoninkubationnet.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
diff --git a/fanficfare/adapters/adapter_tenhawkpresentscom.py b/fanficfare/adapters/adapter_tenhawkpresentscom.py
index 89bc3d81..1d87abdd 100644
--- a/fanficfare/adapters/adapter_tenhawkpresentscom.py
+++ b/fanficfare/adapters/adapter_tenhawkpresentscom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_test1.py b/fanficfare/adapters/adapter_test1.py
index 156764aa..74a87dab 100644
--- a/fanficfare/adapters/adapter_test1.py
+++ b/fanficfare/adapters/adapter_test1.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import datetime
import time
import logging
diff --git a/fanficfare/adapters/adapter_tgstorytimecom.py b/fanficfare/adapters/adapter_tgstorytimecom.py
index ca3002e1..38e9e695 100644
--- a/fanficfare/adapters/adapter_tgstorytimecom.py
+++ b/fanficfare/adapters/adapter_tgstorytimecom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class TGStorytimeComAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_thebrokenworldorg.py b/fanficfare/adapters/adapter_thebrokenworldorg.py
index ea18fc08..a5614b4a 100644
--- a/fanficfare/adapters/adapter_thebrokenworldorg.py
+++ b/fanficfare/adapters/adapter_thebrokenworldorg.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
diff --git a/fanficfare/adapters/adapter_thedelphicexpansecom.py b/fanficfare/adapters/adapter_thedelphicexpansecom.py
index 4a063019..6c4972b1 100644
--- a/fanficfare/adapters/adapter_thedelphicexpansecom.py
+++ b/fanficfare/adapters/adapter_thedelphicexpansecom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
diff --git a/fanficfare/adapters/adapter_thehookupzonenet.py b/fanficfare/adapters/adapter_thehookupzonenet.py
index 78563fc3..6aa7da74 100644
--- a/fanficfare/adapters/adapter_thehookupzonenet.py
+++ b/fanficfare/adapters/adapter_thehookupzonenet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class TheHookupZoneNetAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_themaplebookshelf.py b/fanficfare/adapters/adapter_themaplebookshelf.py
index 186708e7..fdf58f22 100644
--- a/fanficfare/adapters/adapter_themaplebookshelf.py
+++ b/fanficfare/adapters/adapter_themaplebookshelf.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class TheMapleBookshelfComSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_themasquenet.py b/fanficfare/adapters/adapter_themasquenet.py
index 60f7482b..b040ca1a 100644
--- a/fanficfare/adapters/adapter_themasquenet.py
+++ b/fanficfare/adapters/adapter_themasquenet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_thepetulantpoetesscom.py b/fanficfare/adapters/adapter_thepetulantpoetesscom.py
index a1a70f5e..bf1fd8f1 100644
--- a/fanficfare/adapters/adapter_thepetulantpoetesscom.py
+++ b/fanficfare/adapters/adapter_thepetulantpoetesscom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_thundercatsfansorg.py b/fanficfare/adapters/adapter_thundercatsfansorg.py
index 3b31a201..b1157dbe 100644
--- a/fanficfare/adapters/adapter_thundercatsfansorg.py
+++ b/fanficfare/adapters/adapter_thundercatsfansorg.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
diff --git a/fanficfare/adapters/adapter_tolkienfanfiction.py b/fanficfare/adapters/adapter_tolkienfanfiction.py
index c53d65fd..ba79b2e5 100644
--- a/fanficfare/adapters/adapter_tolkienfanfiction.py
+++ b/fanficfare/adapters/adapter_tolkienfanfiction.py
@@ -14,6 +14,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
"""
FFDL Adapter for TolkienFanFiction.com.
diff --git a/fanficfare/adapters/adapter_tomparisdormcom.py b/fanficfare/adapters/adapter_tomparisdormcom.py
index 76c67881..aba06597 100644
--- a/fanficfare/adapters/adapter_tomparisdormcom.py
+++ b/fanficfare/adapters/adapter_tomparisdormcom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_trekfanfictionnet.py b/fanficfare/adapters/adapter_trekfanfictionnet.py
index 7f288d37..aec0bb77 100644
--- a/fanficfare/adapters/adapter_trekfanfictionnet.py
+++ b/fanficfare/adapters/adapter_trekfanfictionnet.py
@@ -22,6 +22,7 @@
###=================================================================================================
### I have started to use lines of # on the line just before a function so they are easier to find.
####################################################################################################
+from __future__ import absolute_import
'''
This will scrape the chapter text and metadata from stories on the site trekfanfiction.net
'''
diff --git a/fanficfare/adapters/adapter_trekiverseorg.py b/fanficfare/adapters/adapter_trekiverseorg.py
index e32a8499..52da5f39 100644
--- a/fanficfare/adapters/adapter_trekiverseorg.py
+++ b/fanficfare/adapters/adapter_trekiverseorg.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_tthfanficorg.py b/fanficfare/adapters/adapter_tthfanficorg.py
index 206a349e..d0954fa4 100644
--- a/fanficfare/adapters/adapter_tthfanficorg.py
+++ b/fanficfare/adapters/adapter_tthfanficorg.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_twilightarchivescom.py b/fanficfare/adapters/adapter_twilightarchivescom.py
index 7cabe5c7..40a6bd0c 100644
--- a/fanficfare/adapters/adapter_twilightarchivescom.py
+++ b/fanficfare/adapters/adapter_twilightarchivescom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_twilightednet.py b/fanficfare/adapters/adapter_twilightednet.py
index 44f689a9..b9671d5a 100644
--- a/fanficfare/adapters/adapter_twilightednet.py
+++ b/fanficfare/adapters/adapter_twilightednet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_unknowableroomorg.py b/fanficfare/adapters/adapter_unknowableroomorg.py
index eb033a46..aac1bfba 100644
--- a/fanficfare/adapters/adapter_unknowableroomorg.py
+++ b/fanficfare/adapters/adapter_unknowableroomorg.py
@@ -17,6 +17,7 @@
####################################################################################################
### Adapted by GComyn on December 19, 2016
####################################################################################################
+from __future__ import absolute_import
''' This adapter will download stories from the site unknowableroom.org '''
import logging
import re
diff --git a/fanficfare/adapters/adapter_valentchambercom.py b/fanficfare/adapters/adapter_valentchambercom.py
index 4b3b6d2a..4f20227a 100644
--- a/fanficfare/adapters/adapter_valentchambercom.py
+++ b/fanficfare/adapters/adapter_valentchambercom.py
@@ -21,6 +21,7 @@
### Rewritten by: GComyn on November, 06, 2016
### Original was adapter_fannation.py
##################################################################################
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class ValentChamberComAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_voracity2eficcom.py b/fanficfare/adapters/adapter_voracity2eficcom.py
index 3ee014ed..a8dcee63 100644
--- a/fanficfare/adapters/adapter_voracity2eficcom.py
+++ b/fanficfare/adapters/adapter_voracity2eficcom.py
@@ -1,4 +1,5 @@
# Software: eFiction
+from __future__ import absolute_import
import re
import urllib2
import urlparse
diff --git a/fanficfare/adapters/adapter_walkingtheplankorg.py b/fanficfare/adapters/adapter_walkingtheplankorg.py
index 037c87ac..45b1ac35 100644
--- a/fanficfare/adapters/adapter_walkingtheplankorg.py
+++ b/fanficfare/adapters/adapter_walkingtheplankorg.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_webnovelcom.py b/fanficfare/adapters/adapter_webnovelcom.py
index 89e10fa3..0f43b590 100644
--- a/fanficfare/adapters/adapter_webnovelcom.py
+++ b/fanficfare/adapters/adapter_webnovelcom.py
@@ -16,6 +16,7 @@
#
# Adapted by GComyn on April 16, 2017
+from __future__ import absolute_import
import cgi
import difflib
import json
diff --git a/fanficfare/adapters/adapter_whoficcom.py b/fanficfare/adapters/adapter_whoficcom.py
index af8a608e..2f6c92f9 100644
--- a/fanficfare/adapters/adapter_whoficcom.py
+++ b/fanficfare/adapters/adapter_whoficcom.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_wolverineandroguecom.py b/fanficfare/adapters/adapter_wolverineandroguecom.py
index 4db5801c..9d450156 100644
--- a/fanficfare/adapters/adapter_wolverineandroguecom.py
+++ b/fanficfare/adapters/adapter_wolverineandroguecom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_wraithbaitcom.py b/fanficfare/adapters/adapter_wraithbaitcom.py
index 47e659a5..06f3c751 100644
--- a/fanficfare/adapters/adapter_wraithbaitcom.py
+++ b/fanficfare/adapters/adapter_wraithbaitcom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py b/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
index 203bb0ad..b7a0aa77 100644
--- a/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
+++ b/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
@@ -16,6 +16,7 @@
#
# Software: eFiction
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_wuxiaworldco.py b/fanficfare/adapters/adapter_wuxiaworldco.py
index fbead952..3f19b5aa 100644
--- a/fanficfare/adapters/adapter_wuxiaworldco.py
+++ b/fanficfare/adapters/adapter_wuxiaworldco.py
@@ -16,6 +16,7 @@
#
+from __future__ import absolute_import
import logging
import re
import urllib2
diff --git a/fanficfare/adapters/adapter_wuxiaworldcom.py b/fanficfare/adapters/adapter_wuxiaworldcom.py
index 8829b05e..6b5afebf 100644
--- a/fanficfare/adapters/adapter_wuxiaworldcom.py
+++ b/fanficfare/adapters/adapter_wuxiaworldcom.py
@@ -16,6 +16,7 @@
# Adapted by GComyn on December 14. 2016
+from __future__ import absolute_import
import json
import logging
import re
diff --git a/fanficfare/adapters/adapter_www13hoursorg.py b/fanficfare/adapters/adapter_www13hoursorg.py
index 857e2911..25a98ec8 100644
--- a/fanficfare/adapters/adapter_www13hoursorg.py
+++ b/fanficfare/adapters/adapter_www13hoursorg.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
diff --git a/fanficfare/adapters/adapter_wwwaneroticstorycom.py b/fanficfare/adapters/adapter_wwwaneroticstorycom.py
index 19f5453a..3fe4299c 100644
--- a/fanficfare/adapters/adapter_wwwaneroticstorycom.py
+++ b/fanficfare/adapters/adapter_wwwaneroticstorycom.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+from __future__ import absolute_import
import logging
import os
import re
diff --git a/fanficfare/adapters/adapter_wwwarea52hkhnet.py b/fanficfare/adapters/adapter_wwwarea52hkhnet.py
index efbc22e9..d15d7453 100644
--- a/fanficfare/adapters/adapter_wwwarea52hkhnet.py
+++ b/fanficfare/adapters/adapter_wwwarea52hkhnet.py
@@ -24,6 +24,7 @@
### Fixed the Metadata processing to take into account that some of the
### stories have the authorinfo div, and to make it more systematic
#############################################################################
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_wwwgiantessworldnet.py b/fanficfare/adapters/adapter_wwwgiantessworldnet.py
index a08ecb08..6ebe972b 100644
--- a/fanficfare/adapters/adapter_wwwgiantessworldnet.py
+++ b/fanficfare/adapters/adapter_wwwgiantessworldnet.py
@@ -18,6 +18,7 @@
### Adapted by GComyn - November 18, 2016
###########################################################################
# Software: eFiction
+from __future__ import absolute_import
from base_efiction_adapter import BaseEfictionAdapter
class WWWGiantessworldNetAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_wwwlushstoriescom.py b/fanficfare/adapters/adapter_wwwlushstoriescom.py
index e14da56f..6fa24cd6 100644
--- a/fanficfare/adapters/adapter_wwwlushstoriescom.py
+++ b/fanficfare/adapters/adapter_wwwlushstoriescom.py
@@ -20,6 +20,7 @@
# Updated on January 07, 2017 - fixed metadata capturing after Jimm fixed the UnidecodeError problem
####################################################################################################
+from __future__ import absolute_import
import time
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_wwwnovelallcom.py b/fanficfare/adapters/adapter_wwwnovelallcom.py
index 542adebb..055d49f6 100644
--- a/fanficfare/adapters/adapter_wwwnovelallcom.py
+++ b/fanficfare/adapters/adapter_wwwnovelallcom.py
@@ -19,6 +19,7 @@
### Tested with Calibre
####################################################################################################
+from __future__ import absolute_import
import logging
import re
import json
diff --git a/fanficfare/adapters/adapter_wwwutopiastoriescom.py b/fanficfare/adapters/adapter_wwwutopiastoriescom.py
index 19db47f5..68f56ddf 100644
--- a/fanficfare/adapters/adapter_wwwutopiastoriescom.py
+++ b/fanficfare/adapters/adapter_wwwutopiastoriescom.py
@@ -23,6 +23,7 @@
### Updated on December 18, 2016
### Updated format as per linter, and added documentation
####################################################################################################
+from __future__ import absolute_import
'''
This site is much link fictionmania, in that there is only one chapter per
story, so we only have the one url to get information from.
diff --git a/fanficfare/dateutils.py b/fanficfare/dateutils.py
index be6e2ba1..2716e253 100644
--- a/fanficfare/dateutils.py
+++ b/fanficfare/dateutils.py
@@ -1,3 +1,22 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 FanFicFare team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import absolute_import
+
from datetime import datetime, timedelta
import logging
diff --git a/fanficfare/exceptions.py b/fanficfare/exceptions.py
index ec4dc8f0..5cc3e140 100644
--- a/fanficfare/exceptions.py
+++ b/fanficfare/exceptions.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,6 +15,8 @@
# limitations under the License.
#
+from __future__ import absolute_import
+
## A few exceptions for different things for adapters
class FailedToDownload(Exception):
diff --git a/fanficfare/mobihtml.py b/fanficfare/mobihtml.py
index 7f6d4a65..92fbfff4 100644
--- a/fanficfare/mobihtml.py
+++ b/fanficfare/mobihtml.py
@@ -2,6 +2,8 @@
# Copyright(c) 2009 Andrew Chatham and Vijay Pandurangan
## This module is used by mobi.py exclusively.
+## Renamed Jul 2018 to avoid conflict with other 'html' packages
+from __future__ import absolute_import
import re
import sys
diff --git a/fanficfare/translit.py b/fanficfare/translit.py
index 0efdc010..096ccff2 100644
--- a/fanficfare/translit.py
+++ b/fanficfare/translit.py
@@ -1,5 +1,7 @@
#-*-coding:utf-8-*-
# Code taken from http://python.su/forum/viewtopic.php?pid=66946
+from __future__ import absolute_import
+
import unicodedata
def is_syllable(letter):
syllables = ("A", "E", "I", "O", "U", "a", "e", "i", "o", "u")
From 7d651a53d19a98215e861581b7f75a16e593f1fb Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Tue, 31 Jul 2018 14:48:00 -0500
Subject: [PATCH 025/120] Remove extra 'import time's
---
fanficfare/adapters/adapter_adastrafanficcom.py | 1 -
fanficfare/adapters/adapter_adultfanfictionorg.py | 1 -
fanficfare/adapters/adapter_andromedawebcom.py | 1 -
fanficfare/adapters/adapter_archiveofourownorg.py | 1 -
fanficfare/adapters/adapter_archiveskyehawkecom.py | 1 -
fanficfare/adapters/adapter_artemisfowlcom.py | 1 -
fanficfare/adapters/adapter_asexstoriescom.py | 2 --
fanficfare/adapters/adapter_ashwindersycophanthexcom.py | 1 -
fanficfare/adapters/adapter_asianfanficscom.py | 1 -
fanficfare/adapters/adapter_asr3slashzoneorg.py | 1 -
fanficfare/adapters/adapter_bloodtiesfancom.py | 1 -
fanficfare/adapters/adapter_buffygilescom.py | 1 -
fanficfare/adapters/adapter_chaossycophanthexcom.py | 1 -
fanficfare/adapters/adapter_chosentwofanficcom.py | 1 -
fanficfare/adapters/adapter_csiforensicscom.py | 1 -
fanficfare/adapters/adapter_deepinmysoulnet.py | 1 -
fanficfare/adapters/adapter_destinysgatewaycom.py | 1 -
fanficfare/adapters/adapter_dokugacom.py | 1 -
fanficfare/adapters/adapter_dracoandginnycom.py | 1 -
fanficfare/adapters/adapter_dramioneorg.py | 1 -
fanficfare/adapters/adapter_efictionestelielde.py | 1 -
fanficfare/adapters/adapter_efpfanficnet.py | 1 -
fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py | 1 -
fanficfare/adapters/adapter_fanficauthorsnet.py | 1 -
fanficfare/adapters/adapter_fanficcastletvnet.py | 1 -
fanficfare/adapters/adapter_fanfictionjunkiesde.py | 1 -
fanficfare/adapters/adapter_ficbooknet.py | 1 -
fanficfare/adapters/adapter_fictionalleyorg.py | 1 -
fanficfare/adapters/adapter_fictionpadcom.py | 2 --
fanficfare/adapters/adapter_fictionpresscom.py | 3 ---
fanficfare/adapters/adapter_ficwadcom.py | 2 --
fanficfare/adapters/adapter_fireflyfansnet.py | 1 -
fanficfare/adapters/adapter_gravitytalescom.py | 1 -
fanficfare/adapters/adapter_hlfictionnet.py | 1 -
fanficfare/adapters/adapter_hpfanficarchivecom.py | 1 -
fanficfare/adapters/adapter_iketernalnet.py | 1 -
fanficfare/adapters/adapter_imagineeficcom.py | 1 -
fanficfare/adapters/adapter_kiarepositorymujajinet.py | 1 -
fanficfare/adapters/adapter_ksarchivecom.py | 1 -
fanficfare/adapters/adapter_lcfanficcom.py | 1 -
fanficfare/adapters/adapter_literotica.py | 1 -
fanficfare/adapters/adapter_lotrgficcom.py | 1 -
fanficfare/adapters/adapter_lumossycophanthexcom.py | 1 -
fanficfare/adapters/adapter_mcstoriescom.py | 2 --
fanficfare/adapters/adapter_mediaminerorg.py | 1 -
fanficfare/adapters/adapter_merlinficdtwinscouk.py | 1 -
fanficfare/adapters/adapter_midnightwhispers.py | 1 -
fanficfare/adapters/adapter_nationallibrarynet.py | 1 -
fanficfare/adapters/adapter_ncisficcom.py | 1 -
fanficfare/adapters/adapter_nfacommunitycom.py | 1 -
fanficfare/adapters/adapter_noveltrovecom.py | 1 -
fanficfare/adapters/adapter_occlumencysycophanthexcom.py | 1 -
fanficfare/adapters/adapter_phoenixsongnet.py | 1 -
fanficfare/adapters/adapter_ponyfictionarchivenet.py | 1 -
fanficfare/adapters/adapter_potionsandsnitches.py | 1 -
fanficfare/adapters/adapter_potterheadsanonymouscom.py | 1 -
fanficfare/adapters/adapter_pretendercentrecom.py | 1 -
fanficfare/adapters/adapter_qafficcom.py | 1 -
fanficfare/adapters/adapter_samandjacknet.py | 1 -
fanficfare/adapters/adapter_scarvesandcoffeenet.py | 1 -
fanficfare/adapters/adapter_sheppardweircom.py | 1 -
fanficfare/adapters/adapter_siyecouk.py | 1 -
fanficfare/adapters/adapter_squidgeorgpeja.py | 1 -
fanficfare/adapters/adapter_storiesofardacom.py | 1 -
fanficfare/adapters/adapter_sugarquillnet.py | 1 -
fanficfare/adapters/adapter_tenhawkpresentscom.py | 1 -
fanficfare/adapters/adapter_themasquenet.py | 1 -
fanficfare/adapters/adapter_thepetulantpoetesscom.py | 1 -
fanficfare/adapters/adapter_tomparisdormcom.py | 1 -
fanficfare/adapters/adapter_trekiverseorg.py | 1 -
fanficfare/adapters/adapter_tthfanficorg.py | 3 ---
fanficfare/adapters/adapter_twilightarchivescom.py | 1 -
fanficfare/adapters/adapter_twilightednet.py | 1 -
fanficfare/adapters/adapter_unknowableroomorg.py | 1 -
fanficfare/adapters/adapter_walkingtheplankorg.py | 1 -
fanficfare/adapters/adapter_whoficcom.py | 1 -
fanficfare/adapters/adapter_wolverineandroguecom.py | 1 -
fanficfare/adapters/adapter_wraithbaitcom.py | 1 -
fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py | 1 -
fanficfare/adapters/adapter_wwwaneroticstorycom.py | 1 -
fanficfare/adapters/adapter_wwwarea52hkhnet.py | 1 -
fanficfare/adapters/adapter_wwwlushstoriescom.py | 1 -
fanficfare/adapters/adapter_wwwutopiastoriescom.py | 1 -
fanficfare/adapters/base_xenforoforum_adapter.py | 1 -
84 files changed, 92 deletions(-)
diff --git a/fanficfare/adapters/adapter_adastrafanficcom.py b/fanficfare/adapters/adapter_adastrafanficcom.py
index b4b9b56d..50c58dbf 100644
--- a/fanficfare/adapters/adapter_adastrafanficcom.py
+++ b/fanficfare/adapters/adapter_adastrafanficcom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_adultfanfictionorg.py b/fanficfare/adapters/adapter_adultfanfictionorg.py
index 280bd497..b6b465a6 100644
--- a/fanficfare/adapters/adapter_adultfanfictionorg.py
+++ b/fanficfare/adapters/adapter_adultfanfictionorg.py
@@ -19,7 +19,6 @@
################################################################################
from __future__ import absolute_import
from __future__ import unicode_literals
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_andromedawebcom.py b/fanficfare/adapters/adapter_andromedawebcom.py
index 5cfc827f..ab7537be 100644
--- a/fanficfare/adapters/adapter_andromedawebcom.py
+++ b/fanficfare/adapters/adapter_andromedawebcom.py
@@ -19,7 +19,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_archiveofourownorg.py b/fanficfare/adapters/adapter_archiveofourownorg.py
index e131258f..f709817c 100644
--- a/fanficfare/adapters/adapter_archiveofourownorg.py
+++ b/fanficfare/adapters/adapter_archiveofourownorg.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_archiveskyehawkecom.py b/fanficfare/adapters/adapter_archiveskyehawkecom.py
index f324e4c6..dffd2cda 100644
--- a/fanficfare/adapters/adapter_archiveskyehawkecom.py
+++ b/fanficfare/adapters/adapter_archiveskyehawkecom.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_artemisfowlcom.py b/fanficfare/adapters/adapter_artemisfowlcom.py
index d25257dc..eccc754c 100644
--- a/fanficfare/adapters/adapter_artemisfowlcom.py
+++ b/fanficfare/adapters/adapter_artemisfowlcom.py
@@ -19,7 +19,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_asexstoriescom.py b/fanficfare/adapters/adapter_asexstoriescom.py
index 5e8ef953..a11728c3 100644
--- a/fanficfare/adapters/adapter_asexstoriescom.py
+++ b/fanficfare/adapters/adapter_asexstoriescom.py
@@ -16,13 +16,11 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
import urllib2
import urlparse
-import time
import os
from bs4.element import Comment
diff --git a/fanficfare/adapters/adapter_ashwindersycophanthexcom.py b/fanficfare/adapters/adapter_ashwindersycophanthexcom.py
index cf42a680..48e03ddb 100644
--- a/fanficfare/adapters/adapter_ashwindersycophanthexcom.py
+++ b/fanficfare/adapters/adapter_ashwindersycophanthexcom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_asianfanficscom.py b/fanficfare/adapters/adapter_asianfanficscom.py
index 69f98cd1..f7f7b920 100644
--- a/fanficfare/adapters/adapter_asianfanficscom.py
+++ b/fanficfare/adapters/adapter_asianfanficscom.py
@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_asr3slashzoneorg.py b/fanficfare/adapters/adapter_asr3slashzoneorg.py
index c519af29..8365266e 100644
--- a/fanficfare/adapters/adapter_asr3slashzoneorg.py
+++ b/fanficfare/adapters/adapter_asr3slashzoneorg.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_bloodtiesfancom.py b/fanficfare/adapters/adapter_bloodtiesfancom.py
index 533b781f..e8bdf286 100644
--- a/fanficfare/adapters/adapter_bloodtiesfancom.py
+++ b/fanficfare/adapters/adapter_bloodtiesfancom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_buffygilescom.py b/fanficfare/adapters/adapter_buffygilescom.py
index 54669ce1..a9b1d0c5 100644
--- a/fanficfare/adapters/adapter_buffygilescom.py
+++ b/fanficfare/adapters/adapter_buffygilescom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_chaossycophanthexcom.py b/fanficfare/adapters/adapter_chaossycophanthexcom.py
index 8a0a1630..dc1dac76 100644
--- a/fanficfare/adapters/adapter_chaossycophanthexcom.py
+++ b/fanficfare/adapters/adapter_chaossycophanthexcom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_chosentwofanficcom.py b/fanficfare/adapters/adapter_chosentwofanficcom.py
index 2e0e57b3..862896f9 100644
--- a/fanficfare/adapters/adapter_chosentwofanficcom.py
+++ b/fanficfare/adapters/adapter_chosentwofanficcom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_csiforensicscom.py b/fanficfare/adapters/adapter_csiforensicscom.py
index 1c1d7c80..e5f8057a 100644
--- a/fanficfare/adapters/adapter_csiforensicscom.py
+++ b/fanficfare/adapters/adapter_csiforensicscom.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_deepinmysoulnet.py b/fanficfare/adapters/adapter_deepinmysoulnet.py
index ad76e765..81eae44a 100644
--- a/fanficfare/adapters/adapter_deepinmysoulnet.py
+++ b/fanficfare/adapters/adapter_deepinmysoulnet.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_destinysgatewaycom.py b/fanficfare/adapters/adapter_destinysgatewaycom.py
index d93b772c..39e6426c 100644
--- a/fanficfare/adapters/adapter_destinysgatewaycom.py
+++ b/fanficfare/adapters/adapter_destinysgatewaycom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_dokugacom.py b/fanficfare/adapters/adapter_dokugacom.py
index 0a41ad39..bba8a5f5 100644
--- a/fanficfare/adapters/adapter_dokugacom.py
+++ b/fanficfare/adapters/adapter_dokugacom.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_dracoandginnycom.py b/fanficfare/adapters/adapter_dracoandginnycom.py
index bd98e13f..49433492 100644
--- a/fanficfare/adapters/adapter_dracoandginnycom.py
+++ b/fanficfare/adapters/adapter_dracoandginnycom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_dramioneorg.py b/fanficfare/adapters/adapter_dramioneorg.py
index 32152863..4d622df3 100644
--- a/fanficfare/adapters/adapter_dramioneorg.py
+++ b/fanficfare/adapters/adapter_dramioneorg.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_efictionestelielde.py b/fanficfare/adapters/adapter_efictionestelielde.py
index 9c481e88..6b669798 100644
--- a/fanficfare/adapters/adapter_efictionestelielde.py
+++ b/fanficfare/adapters/adapter_efictionestelielde.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_efpfanficnet.py b/fanficfare/adapters/adapter_efpfanficnet.py
index 959219e9..da2bc1ec 100644
--- a/fanficfare/adapters/adapter_efpfanficnet.py
+++ b/fanficfare/adapters/adapter_efpfanficnet.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py b/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py
index ce47a784..05d0d5de 100644
--- a/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py
+++ b/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_fanficauthorsnet.py b/fanficfare/adapters/adapter_fanficauthorsnet.py
index 7d583559..2cbfb20e 100644
--- a/fanficfare/adapters/adapter_fanficauthorsnet.py
+++ b/fanficfare/adapters/adapter_fanficauthorsnet.py
@@ -20,7 +20,6 @@
####################################################################################################
from __future__ import absolute_import
from __future__ import unicode_literals
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_fanficcastletvnet.py b/fanficfare/adapters/adapter_fanficcastletvnet.py
index 15e1e219..0d37dd2d 100644
--- a/fanficfare/adapters/adapter_fanficcastletvnet.py
+++ b/fanficfare/adapters/adapter_fanficcastletvnet.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_fanfictionjunkiesde.py b/fanficfare/adapters/adapter_fanfictionjunkiesde.py
index 6c436945..c6694b1d 100644
--- a/fanficfare/adapters/adapter_fanfictionjunkiesde.py
+++ b/fanficfare/adapters/adapter_fanfictionjunkiesde.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_ficbooknet.py b/fanficfare/adapters/adapter_ficbooknet.py
index dcfd3c33..a8f74742 100644
--- a/fanficfare/adapters/adapter_ficbooknet.py
+++ b/fanficfare/adapters/adapter_ficbooknet.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import datetime
import logging
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_fictionalleyorg.py b/fanficfare/adapters/adapter_fictionalleyorg.py
index b7e53b0e..a8c35f03 100644
--- a/fanficfare/adapters/adapter_fictionalleyorg.py
+++ b/fanficfare/adapters/adapter_fictionalleyorg.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_fictionpadcom.py b/fanficfare/adapters/adapter_fictionpadcom.py
index 4440356c..5cebbc50 100644
--- a/fanficfare/adapters/adapter_fictionpadcom.py
+++ b/fanficfare/adapters/adapter_fictionpadcom.py
@@ -16,12 +16,10 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
import urllib2
-import time
import json
diff --git a/fanficfare/adapters/adapter_fictionpresscom.py b/fanficfare/adapters/adapter_fictionpresscom.py
index bfd667f9..8d5cef68 100644
--- a/fanficfare/adapters/adapter_fictionpresscom.py
+++ b/fanficfare/adapters/adapter_fictionpresscom.py
@@ -16,13 +16,10 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
import urllib2
-import time
-
## They're from the same people and pretty much identical.
from adapter_fanfictionnet import FanFictionNetSiteAdapter
diff --git a/fanficfare/adapters/adapter_ficwadcom.py b/fanficfare/adapters/adapter_ficwadcom.py
index 7991b7a8..ba0fc21e 100644
--- a/fanficfare/adapters/adapter_ficwadcom.py
+++ b/fanficfare/adapters/adapter_ficwadcom.py
@@ -16,12 +16,10 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
import urllib2
-import time
import httplib, urllib
from .. import exceptions as exceptions
diff --git a/fanficfare/adapters/adapter_fireflyfansnet.py b/fanficfare/adapters/adapter_fireflyfansnet.py
index 5027222b..99550358 100644
--- a/fanficfare/adapters/adapter_fireflyfansnet.py
+++ b/fanficfare/adapters/adapter_fireflyfansnet.py
@@ -22,7 +22,6 @@ from __future__ import absolute_import
import logging
import re
import sys
-import time
import urllib2
from base_adapter import BaseSiteAdapter, makeDate
diff --git a/fanficfare/adapters/adapter_gravitytalescom.py b/fanficfare/adapters/adapter_gravitytalescom.py
index 9391cf8d..720eee64 100644
--- a/fanficfare/adapters/adapter_gravitytalescom.py
+++ b/fanficfare/adapters/adapter_gravitytalescom.py
@@ -21,7 +21,6 @@
from __future__ import absolute_import
import logging
import re
-import time
import urllib2
from datetime import datetime
diff --git a/fanficfare/adapters/adapter_hlfictionnet.py b/fanficfare/adapters/adapter_hlfictionnet.py
index 52adf5e9..7c67da25 100644
--- a/fanficfare/adapters/adapter_hlfictionnet.py
+++ b/fanficfare/adapters/adapter_hlfictionnet.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_hpfanficarchivecom.py b/fanficfare/adapters/adapter_hpfanficarchivecom.py
index 71bd805f..58a086a2 100644
--- a/fanficfare/adapters/adapter_hpfanficarchivecom.py
+++ b/fanficfare/adapters/adapter_hpfanficarchivecom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_iketernalnet.py b/fanficfare/adapters/adapter_iketernalnet.py
index e2ff31d7..be15c64d 100644
--- a/fanficfare/adapters/adapter_iketernalnet.py
+++ b/fanficfare/adapters/adapter_iketernalnet.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_imagineeficcom.py b/fanficfare/adapters/adapter_imagineeficcom.py
index de9aae23..15c64028 100644
--- a/fanficfare/adapters/adapter_imagineeficcom.py
+++ b/fanficfare/adapters/adapter_imagineeficcom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_kiarepositorymujajinet.py b/fanficfare/adapters/adapter_kiarepositorymujajinet.py
index 568e63d2..d83a0c27 100644
--- a/fanficfare/adapters/adapter_kiarepositorymujajinet.py
+++ b/fanficfare/adapters/adapter_kiarepositorymujajinet.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_ksarchivecom.py b/fanficfare/adapters/adapter_ksarchivecom.py
index e0897a15..312a19b3 100644
--- a/fanficfare/adapters/adapter_ksarchivecom.py
+++ b/fanficfare/adapters/adapter_ksarchivecom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_lcfanficcom.py b/fanficfare/adapters/adapter_lcfanficcom.py
index 412c402e..8ccdf2b6 100644
--- a/fanficfare/adapters/adapter_lcfanficcom.py
+++ b/fanficfare/adapters/adapter_lcfanficcom.py
@@ -23,7 +23,6 @@ import logging
import json
import re
import sys # ## used for debug purposes
-import time
import urllib2
import datetime
diff --git a/fanficfare/adapters/adapter_literotica.py b/fanficfare/adapters/adapter_literotica.py
index b2269877..537bc288 100644
--- a/fanficfare/adapters/adapter_literotica.py
+++ b/fanficfare/adapters/adapter_literotica.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_lotrgficcom.py b/fanficfare/adapters/adapter_lotrgficcom.py
index f1cc006c..b93c73f8 100644
--- a/fanficfare/adapters/adapter_lotrgficcom.py
+++ b/fanficfare/adapters/adapter_lotrgficcom.py
@@ -19,7 +19,6 @@
### Completed on November, 22, 2016
##############################################################################
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_lumossycophanthexcom.py b/fanficfare/adapters/adapter_lumossycophanthexcom.py
index 4b353c39..a3bf1c13 100644
--- a/fanficfare/adapters/adapter_lumossycophanthexcom.py
+++ b/fanficfare/adapters/adapter_lumossycophanthexcom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_mcstoriescom.py b/fanficfare/adapters/adapter_mcstoriescom.py
index 332cc5e3..bec4a240 100644
--- a/fanficfare/adapters/adapter_mcstoriescom.py
+++ b/fanficfare/adapters/adapter_mcstoriescom.py
@@ -16,13 +16,11 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
import urllib2
import urlparse
-import time
import os
from bs4.element import Comment
diff --git a/fanficfare/adapters/adapter_mediaminerorg.py b/fanficfare/adapters/adapter_mediaminerorg.py
index ac5eb45e..09de475d 100644
--- a/fanficfare/adapters/adapter_mediaminerorg.py
+++ b/fanficfare/adapters/adapter_mediaminerorg.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_merlinficdtwinscouk.py b/fanficfare/adapters/adapter_merlinficdtwinscouk.py
index 41671955..39b98ad5 100644
--- a/fanficfare/adapters/adapter_merlinficdtwinscouk.py
+++ b/fanficfare/adapters/adapter_merlinficdtwinscouk.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_midnightwhispers.py b/fanficfare/adapters/adapter_midnightwhispers.py
index 86d5a6cf..27dbe46f 100644
--- a/fanficfare/adapters/adapter_midnightwhispers.py
+++ b/fanficfare/adapters/adapter_midnightwhispers.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_nationallibrarynet.py b/fanficfare/adapters/adapter_nationallibrarynet.py
index fc8d8655..8c55f16e 100644
--- a/fanficfare/adapters/adapter_nationallibrarynet.py
+++ b/fanficfare/adapters/adapter_nationallibrarynet.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_ncisficcom.py b/fanficfare/adapters/adapter_ncisficcom.py
index 6fa3f33e..c8f9953c 100644
--- a/fanficfare/adapters/adapter_ncisficcom.py
+++ b/fanficfare/adapters/adapter_ncisficcom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_nfacommunitycom.py b/fanficfare/adapters/adapter_nfacommunitycom.py
index d1650176..7371e6eb 100644
--- a/fanficfare/adapters/adapter_nfacommunitycom.py
+++ b/fanficfare/adapters/adapter_nfacommunitycom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_noveltrovecom.py b/fanficfare/adapters/adapter_noveltrovecom.py
index c7907439..2eb9f5c7 100644
--- a/fanficfare/adapters/adapter_noveltrovecom.py
+++ b/fanficfare/adapters/adapter_noveltrovecom.py
@@ -23,7 +23,6 @@ import logging
import json
import re
import sys # ## used for debug purposes
-import time
import urllib2
import datetime
diff --git a/fanficfare/adapters/adapter_occlumencysycophanthexcom.py b/fanficfare/adapters/adapter_occlumencysycophanthexcom.py
index 01ff9228..e8e4b6cd 100644
--- a/fanficfare/adapters/adapter_occlumencysycophanthexcom.py
+++ b/fanficfare/adapters/adapter_occlumencysycophanthexcom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_phoenixsongnet.py b/fanficfare/adapters/adapter_phoenixsongnet.py
index 05197f29..5e5fd1eb 100644
--- a/fanficfare/adapters/adapter_phoenixsongnet.py
+++ b/fanficfare/adapters/adapter_phoenixsongnet.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_ponyfictionarchivenet.py b/fanficfare/adapters/adapter_ponyfictionarchivenet.py
index ad2ad143..39047c1f 100644
--- a/fanficfare/adapters/adapter_ponyfictionarchivenet.py
+++ b/fanficfare/adapters/adapter_ponyfictionarchivenet.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_potionsandsnitches.py b/fanficfare/adapters/adapter_potionsandsnitches.py
index 7b9ec32f..f7db6b91 100644
--- a/fanficfare/adapters/adapter_potionsandsnitches.py
+++ b/fanficfare/adapters/adapter_potionsandsnitches.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_potterheadsanonymouscom.py b/fanficfare/adapters/adapter_potterheadsanonymouscom.py
index bf806bec..13556e8e 100644
--- a/fanficfare/adapters/adapter_potterheadsanonymouscom.py
+++ b/fanficfare/adapters/adapter_potterheadsanonymouscom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_pretendercentrecom.py b/fanficfare/adapters/adapter_pretendercentrecom.py
index 76c42127..640cb2f6 100644
--- a/fanficfare/adapters/adapter_pretendercentrecom.py
+++ b/fanficfare/adapters/adapter_pretendercentrecom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_qafficcom.py b/fanficfare/adapters/adapter_qafficcom.py
index 5fba91f0..c72d4a1b 100644
--- a/fanficfare/adapters/adapter_qafficcom.py
+++ b/fanficfare/adapters/adapter_qafficcom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_samandjacknet.py b/fanficfare/adapters/adapter_samandjacknet.py
index aedf5050..94d1b55d 100644
--- a/fanficfare/adapters/adapter_samandjacknet.py
+++ b/fanficfare/adapters/adapter_samandjacknet.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_scarvesandcoffeenet.py b/fanficfare/adapters/adapter_scarvesandcoffeenet.py
index be3075d7..3382594b 100644
--- a/fanficfare/adapters/adapter_scarvesandcoffeenet.py
+++ b/fanficfare/adapters/adapter_scarvesandcoffeenet.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_sheppardweircom.py b/fanficfare/adapters/adapter_sheppardweircom.py
index 578d8035..7c4b0131 100644
--- a/fanficfare/adapters/adapter_sheppardweircom.py
+++ b/fanficfare/adapters/adapter_sheppardweircom.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_siyecouk.py b/fanficfare/adapters/adapter_siyecouk.py
index 9c2b808e..70d4032f 100644
--- a/fanficfare/adapters/adapter_siyecouk.py
+++ b/fanficfare/adapters/adapter_siyecouk.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_squidgeorgpeja.py b/fanficfare/adapters/adapter_squidgeorgpeja.py
index 2e8e84be..4ad0c3d3 100644
--- a/fanficfare/adapters/adapter_squidgeorgpeja.py
+++ b/fanficfare/adapters/adapter_squidgeorgpeja.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_storiesofardacom.py b/fanficfare/adapters/adapter_storiesofardacom.py
index 388191ee..2cd1dd15 100644
--- a/fanficfare/adapters/adapter_storiesofardacom.py
+++ b/fanficfare/adapters/adapter_storiesofardacom.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_sugarquillnet.py b/fanficfare/adapters/adapter_sugarquillnet.py
index a9160b32..32e04b0c 100644
--- a/fanficfare/adapters/adapter_sugarquillnet.py
+++ b/fanficfare/adapters/adapter_sugarquillnet.py
@@ -27,7 +27,6 @@
### formatting as I thought I could.
#############################################################################
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_tenhawkpresentscom.py b/fanficfare/adapters/adapter_tenhawkpresentscom.py
index 1d87abdd..31049278 100644
--- a/fanficfare/adapters/adapter_tenhawkpresentscom.py
+++ b/fanficfare/adapters/adapter_tenhawkpresentscom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_themasquenet.py b/fanficfare/adapters/adapter_themasquenet.py
index b040ca1a..23e5ac4c 100644
--- a/fanficfare/adapters/adapter_themasquenet.py
+++ b/fanficfare/adapters/adapter_themasquenet.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_thepetulantpoetesscom.py b/fanficfare/adapters/adapter_thepetulantpoetesscom.py
index bf1fd8f1..834882f3 100644
--- a/fanficfare/adapters/adapter_thepetulantpoetesscom.py
+++ b/fanficfare/adapters/adapter_thepetulantpoetesscom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_tomparisdormcom.py b/fanficfare/adapters/adapter_tomparisdormcom.py
index aba06597..e5b03133 100644
--- a/fanficfare/adapters/adapter_tomparisdormcom.py
+++ b/fanficfare/adapters/adapter_tomparisdormcom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_trekiverseorg.py b/fanficfare/adapters/adapter_trekiverseorg.py
index 52da5f39..fd9ccf46 100644
--- a/fanficfare/adapters/adapter_trekiverseorg.py
+++ b/fanficfare/adapters/adapter_trekiverseorg.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_tthfanficorg.py b/fanficfare/adapters/adapter_tthfanficorg.py
index d0954fa4..0af384be 100644
--- a/fanficfare/adapters/adapter_tthfanficorg.py
+++ b/fanficfare/adapters/adapter_tthfanficorg.py
@@ -16,13 +16,10 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
import urllib2
-import time
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
diff --git a/fanficfare/adapters/adapter_twilightarchivescom.py b/fanficfare/adapters/adapter_twilightarchivescom.py
index 40a6bd0c..f322f85a 100644
--- a/fanficfare/adapters/adapter_twilightarchivescom.py
+++ b/fanficfare/adapters/adapter_twilightarchivescom.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_twilightednet.py b/fanficfare/adapters/adapter_twilightednet.py
index b9671d5a..a2cab28b 100644
--- a/fanficfare/adapters/adapter_twilightednet.py
+++ b/fanficfare/adapters/adapter_twilightednet.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_unknowableroomorg.py b/fanficfare/adapters/adapter_unknowableroomorg.py
index aac1bfba..487422a0 100644
--- a/fanficfare/adapters/adapter_unknowableroomorg.py
+++ b/fanficfare/adapters/adapter_unknowableroomorg.py
@@ -21,7 +21,6 @@ from __future__ import absolute_import
''' This adapter will download stories from the site unknowableroom.org '''
import logging
import re
-import time
import urllib2
import sys
diff --git a/fanficfare/adapters/adapter_walkingtheplankorg.py b/fanficfare/adapters/adapter_walkingtheplankorg.py
index 45b1ac35..d1e05c40 100644
--- a/fanficfare/adapters/adapter_walkingtheplankorg.py
+++ b/fanficfare/adapters/adapter_walkingtheplankorg.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_whoficcom.py b/fanficfare/adapters/adapter_whoficcom.py
index 2f6c92f9..7d83e015 100644
--- a/fanficfare/adapters/adapter_whoficcom.py
+++ b/fanficfare/adapters/adapter_whoficcom.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_wolverineandroguecom.py b/fanficfare/adapters/adapter_wolverineandroguecom.py
index 9d450156..1cc5d999 100644
--- a/fanficfare/adapters/adapter_wolverineandroguecom.py
+++ b/fanficfare/adapters/adapter_wolverineandroguecom.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_wraithbaitcom.py b/fanficfare/adapters/adapter_wraithbaitcom.py
index 06f3c751..2d6d1b93 100644
--- a/fanficfare/adapters/adapter_wraithbaitcom.py
+++ b/fanficfare/adapters/adapter_wraithbaitcom.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py b/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
index b7a0aa77..1ff0101e 100644
--- a/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
+++ b/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
@@ -17,7 +17,6 @@
# Software: eFiction
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_wwwaneroticstorycom.py b/fanficfare/adapters/adapter_wwwaneroticstorycom.py
index 3fe4299c..f399c530 100644
--- a/fanficfare/adapters/adapter_wwwaneroticstorycom.py
+++ b/fanficfare/adapters/adapter_wwwaneroticstorycom.py
@@ -20,7 +20,6 @@ import logging
import os
import re
import sys
-import time
import urllib2
import urlparse
diff --git a/fanficfare/adapters/adapter_wwwarea52hkhnet.py b/fanficfare/adapters/adapter_wwwarea52hkhnet.py
index d15d7453..86d669e5 100644
--- a/fanficfare/adapters/adapter_wwwarea52hkhnet.py
+++ b/fanficfare/adapters/adapter_wwwarea52hkhnet.py
@@ -25,7 +25,6 @@
### stories have the authorinfo div, and to make it more systematic
#############################################################################
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_wwwlushstoriescom.py b/fanficfare/adapters/adapter_wwwlushstoriescom.py
index 6fa24cd6..1ed17ea6 100644
--- a/fanficfare/adapters/adapter_wwwlushstoriescom.py
+++ b/fanficfare/adapters/adapter_wwwlushstoriescom.py
@@ -21,7 +21,6 @@
####################################################################################################
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/adapter_wwwutopiastoriescom.py b/fanficfare/adapters/adapter_wwwutopiastoriescom.py
index 68f56ddf..a13bdf8d 100644
--- a/fanficfare/adapters/adapter_wwwutopiastoriescom.py
+++ b/fanficfare/adapters/adapter_wwwutopiastoriescom.py
@@ -29,7 +29,6 @@ This site is much link fictionmania, in that there is only one chapter per
story, so we only have the one url to get information from.
We get the category from the author's page
'''
-import time
import logging
logger = logging.getLogger(__name__)
import re
diff --git a/fanficfare/adapters/base_xenforoforum_adapter.py b/fanficfare/adapters/base_xenforoforum_adapter.py
index d5ea1fec..b5549495 100644
--- a/fanficfare/adapters/base_xenforoforum_adapter.py
+++ b/fanficfare/adapters/base_xenforoforum_adapter.py
@@ -16,7 +16,6 @@
#
from __future__ import absolute_import
-import time
import logging
logger = logging.getLogger(__name__)
import re
From eb29c0b78f49dfb0fa01bc33daa81b924131e4f3 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Tue, 31 Jul 2018 14:51:24 -0500
Subject: [PATCH 026/120] from .base_adapter
---
fanficfare/adapters/adapter_adastrafanficcom.py | 2 +-
fanficfare/adapters/adapter_adultfanfictionorg.py | 2 +-
fanficfare/adapters/adapter_andromedawebcom.py | 2 +-
fanficfare/adapters/adapter_archiveofourownorg.py | 2 +-
fanficfare/adapters/adapter_archiveskyehawkecom.py | 2 +-
fanficfare/adapters/adapter_artemisfowlcom.py | 2 +-
fanficfare/adapters/adapter_asexstoriescom.py | 2 +-
fanficfare/adapters/adapter_ashwindersycophanthexcom.py | 2 +-
fanficfare/adapters/adapter_asianfanficscom.py | 2 +-
fanficfare/adapters/adapter_asr3slashzoneorg.py | 2 +-
fanficfare/adapters/adapter_bdsmlibrarycom.py | 2 +-
fanficfare/adapters/adapter_bloodshedversecom.py | 2 +-
fanficfare/adapters/adapter_bloodtiesfancom.py | 2 +-
fanficfare/adapters/adapter_buffygilescom.py | 2 +-
fanficfare/adapters/adapter_chaossycophanthexcom.py | 2 +-
fanficfare/adapters/adapter_chosentwofanficcom.py | 2 +-
fanficfare/adapters/adapter_csiforensicscom.py | 2 +-
fanficfare/adapters/adapter_darksolaceorg.py | 2 +-
fanficfare/adapters/adapter_deandamagecom.py | 2 +-
fanficfare/adapters/adapter_deepinmysoulnet.py | 2 +-
fanficfare/adapters/adapter_destinysgatewaycom.py | 2 +-
fanficfare/adapters/adapter_dokugacom.py | 2 +-
fanficfare/adapters/adapter_dracoandginnycom.py | 2 +-
fanficfare/adapters/adapter_dramioneorg.py | 2 +-
fanficfare/adapters/adapter_efictionestelielde.py | 2 +-
fanficfare/adapters/adapter_efpfanficnet.py | 2 +-
fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py | 2 +-
fanficfare/adapters/adapter_fanficauthorsnet.py | 2 +-
fanficfare/adapters/adapter_fanficcastletvnet.py | 2 +-
fanficfare/adapters/adapter_fanfichu.py | 2 +-
fanficfare/adapters/adapter_fanfictionjunkiesde.py | 2 +-
fanficfare/adapters/adapter_fanfiktionde.py | 2 +-
fanficfare/adapters/adapter_fannation.py | 2 +-
fanficfare/adapters/adapter_fhsarchivecom.py | 2 +-
fanficfare/adapters/adapter_ficbooknet.py | 2 +-
fanficfare/adapters/adapter_fictionalleyorg.py | 2 +-
fanficfare/adapters/adapter_fictionhuntcom.py | 2 +-
fanficfare/adapters/adapter_fictionmaniatv.py | 2 +-
fanficfare/adapters/adapter_fictionpadcom.py | 2 +-
fanficfare/adapters/adapter_ficwadcom.py | 2 +-
fanficfare/adapters/adapter_fimfictionnet.py | 2 +-
fanficfare/adapters/adapter_fireflyfansnet.py | 2 +-
fanficfare/adapters/adapter_fireflypopulliorg.py | 2 +-
fanficfare/adapters/adapter_forumquestionablequestingcom.py | 2 +-
fanficfare/adapters/adapter_forumssufficientvelocitycom.py | 2 +-
fanficfare/adapters/adapter_gluttonyfictioncom.py | 2 +-
fanficfare/adapters/adapter_gravitytalescom.py | 2 +-
fanficfare/adapters/adapter_harrypotterfanfictioncom.py | 2 +-
fanficfare/adapters/adapter_hlfictionnet.py | 2 +-
fanficfare/adapters/adapter_hpfanficarchivecom.py | 2 +-
fanficfare/adapters/adapter_iketernalnet.py | 2 +-
fanficfare/adapters/adapter_imagineeficcom.py | 2 +-
fanficfare/adapters/adapter_imrightbehindyoucom.py | 2 +-
fanficfare/adapters/adapter_inkbunnynet.py | 2 +-
fanficfare/adapters/adapter_itcouldhappennet.py | 2 +-
fanficfare/adapters/adapter_kiarepositorymujajinet.py | 2 +-
fanficfare/adapters/adapter_ksarchivecom.py | 2 +-
fanficfare/adapters/adapter_lcfanficcom.py | 2 +-
fanficfare/adapters/adapter_libraryofmoriacom.py | 2 +-
fanficfare/adapters/adapter_lightnovelgatecom.py | 2 +-
fanficfare/adapters/adapter_literotica.py | 2 +-
fanficfare/adapters/adapter_looselugscom.py | 2 +-
fanficfare/adapters/adapter_lotrfanfictioncom.py | 2 +-
fanficfare/adapters/adapter_lotrgficcom.py | 2 +-
fanficfare/adapters/adapter_lumossycophanthexcom.py | 2 +-
fanficfare/adapters/adapter_masseffect2in.py | 2 +-
fanficfare/adapters/adapter_mcstoriescom.py | 2 +-
fanficfare/adapters/adapter_mediaminerorg.py | 2 +-
fanficfare/adapters/adapter_merlinficdtwinscouk.py | 2 +-
fanficfare/adapters/adapter_midnightwhispers.py | 2 +-
fanficfare/adapters/adapter_mttjustoncenet.py | 2 +-
fanficfare/adapters/adapter_naiceanilmenet.py | 2 +-
fanficfare/adapters/adapter_narutoficorg.py | 2 +-
fanficfare/adapters/adapter_nationallibrarynet.py | 2 +-
fanficfare/adapters/adapter_ncisficcom.py | 2 +-
fanficfare/adapters/adapter_ncisfictioncom.py | 2 +-
fanficfare/adapters/adapter_nfacommunitycom.py | 2 +-
fanficfare/adapters/adapter_nhamagicalworldsus.py | 2 +-
fanficfare/adapters/adapter_ninelivesarchivecom.py | 2 +-
fanficfare/adapters/adapter_nocturnallightnet.py | 2 +-
fanficfare/adapters/adapter_noveltrovecom.py | 2 +-
fanficfare/adapters/adapter_occlumencysycophanthexcom.py | 2 +-
fanficfare/adapters/adapter_phoenixsongnet.py | 2 +-
fanficfare/adapters/adapter_ponyfictionarchivenet.py | 2 +-
fanficfare/adapters/adapter_potionsandsnitches.py | 2 +-
fanficfare/adapters/adapter_potterficscom.py | 2 +-
fanficfare/adapters/adapter_potterheadsanonymouscom.py | 2 +-
fanficfare/adapters/adapter_pretendercentrecom.py | 2 +-
fanficfare/adapters/adapter_qafficcom.py | 2 +-
fanficfare/adapters/adapter_quotevcom.py | 2 +-
fanficfare/adapters/adapter_royalroadl.py | 2 +-
fanficfare/adapters/adapter_samandjacknet.py | 2 +-
fanficfare/adapters/adapter_scarvesandcoffeenet.py | 2 +-
fanficfare/adapters/adapter_sebklainenet.py | 2 +-
fanficfare/adapters/adapter_sheppardweircom.py | 2 +-
fanficfare/adapters/adapter_shriftweborgbfa.py | 2 +-
fanficfare/adapters/adapter_sinfuldreamscomunicornfic.py | 2 +-
fanficfare/adapters/adapter_sinfuldreamscomwhisperedmuse.py | 2 +-
fanficfare/adapters/adapter_sinfuldreamscomwickedtemptation.py | 2 +-
fanficfare/adapters/adapter_siyecouk.py | 2 +-
fanficfare/adapters/adapter_spikeluvercom.py | 2 +-
fanficfare/adapters/adapter_squidgeorgpeja.py | 2 +-
fanficfare/adapters/adapter_starskyhutcharchivenet.py | 2 +-
fanficfare/adapters/adapter_starslibrarynet.py | 2 +-
fanficfare/adapters/adapter_storiesofardacom.py | 2 +-
fanficfare/adapters/adapter_storiesonlinenet.py | 2 +-
fanficfare/adapters/adapter_sugarquillnet.py | 2 +-
fanficfare/adapters/adapter_swordborderlineangelcom.py | 2 +-
fanficfare/adapters/adapter_tasteofpoisoninkubationnet.py | 2 +-
fanficfare/adapters/adapter_tenhawkpresentscom.py | 2 +-
fanficfare/adapters/adapter_tgstorytimecom.py | 2 +-
fanficfare/adapters/adapter_thebrokenworldorg.py | 2 +-
fanficfare/adapters/adapter_thedelphicexpansecom.py | 2 +-
fanficfare/adapters/adapter_thehookupzonenet.py | 2 +-
fanficfare/adapters/adapter_themaplebookshelf.py | 2 +-
fanficfare/adapters/adapter_themasquenet.py | 2 +-
fanficfare/adapters/adapter_thepetulantpoetesscom.py | 2 +-
fanficfare/adapters/adapter_thundercatsfansorg.py | 2 +-
fanficfare/adapters/adapter_tolkienfanfiction.py | 2 +-
fanficfare/adapters/adapter_tomparisdormcom.py | 2 +-
fanficfare/adapters/adapter_trekfanfictionnet.py | 2 +-
fanficfare/adapters/adapter_trekiverseorg.py | 2 +-
fanficfare/adapters/adapter_tthfanficorg.py | 2 +-
fanficfare/adapters/adapter_twilightarchivescom.py | 2 +-
fanficfare/adapters/adapter_twilightednet.py | 2 +-
fanficfare/adapters/adapter_unknowableroomorg.py | 2 +-
fanficfare/adapters/adapter_valentchambercom.py | 2 +-
fanficfare/adapters/adapter_voracity2eficcom.py | 2 +-
fanficfare/adapters/adapter_walkingtheplankorg.py | 2 +-
fanficfare/adapters/adapter_webnovelcom.py | 2 +-
fanficfare/adapters/adapter_whoficcom.py | 2 +-
fanficfare/adapters/adapter_wolverineandroguecom.py | 2 +-
fanficfare/adapters/adapter_wraithbaitcom.py | 2 +-
fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py | 2 +-
fanficfare/adapters/adapter_wuxiaworldco.py | 2 +-
fanficfare/adapters/adapter_wuxiaworldcom.py | 2 +-
fanficfare/adapters/adapter_www13hoursorg.py | 2 +-
fanficfare/adapters/adapter_wwwaneroticstorycom.py | 2 +-
fanficfare/adapters/adapter_wwwarea52hkhnet.py | 2 +-
fanficfare/adapters/adapter_wwwgiantessworldnet.py | 2 +-
fanficfare/adapters/adapter_wwwlushstoriescom.py | 2 +-
fanficfare/adapters/adapter_wwwnovelallcom.py | 2 +-
fanficfare/adapters/adapter_wwwutopiastoriescom.py | 2 +-
143 files changed, 143 insertions(+), 143 deletions(-)
diff --git a/fanficfare/adapters/adapter_adastrafanficcom.py b/fanficfare/adapters/adapter_adastrafanficcom.py
index 50c58dbf..f8da90e4 100644
--- a/fanficfare/adapters/adapter_adastrafanficcom.py
+++ b/fanficfare/adapters/adapter_adastrafanficcom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class AdAstraFanficComSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_adultfanfictionorg.py b/fanficfare/adapters/adapter_adultfanfictionorg.py
index b6b465a6..2776ffe9 100644
--- a/fanficfare/adapters/adapter_adultfanfictionorg.py
+++ b/fanficfare/adapters/adapter_adultfanfictionorg.py
@@ -29,7 +29,7 @@ from bs4 import UnicodeDammit
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
################################################################################
diff --git a/fanficfare/adapters/adapter_andromedawebcom.py b/fanficfare/adapters/adapter_andromedawebcom.py
index ab7537be..4a35e11e 100644
--- a/fanficfare/adapters/adapter_andromedawebcom.py
+++ b/fanficfare/adapters/adapter_andromedawebcom.py
@@ -28,7 +28,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return AndromedaWebComAdapter # XXX
diff --git a/fanficfare/adapters/adapter_archiveofourownorg.py b/fanficfare/adapters/adapter_archiveofourownorg.py
index f709817c..489a824e 100644
--- a/fanficfare/adapters/adapter_archiveofourownorg.py
+++ b/fanficfare/adapters/adapter_archiveofourownorg.py
@@ -25,7 +25,7 @@ import json
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return ArchiveOfOurOwnOrgAdapter
diff --git a/fanficfare/adapters/adapter_archiveskyehawkecom.py b/fanficfare/adapters/adapter_archiveskyehawkecom.py
index dffd2cda..b2307635 100644
--- a/fanficfare/adapters/adapter_archiveskyehawkecom.py
+++ b/fanficfare/adapters/adapter_archiveskyehawkecom.py
@@ -25,7 +25,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
diff --git a/fanficfare/adapters/adapter_artemisfowlcom.py b/fanficfare/adapters/adapter_artemisfowlcom.py
index eccc754c..e446cb2e 100644
--- a/fanficfare/adapters/adapter_artemisfowlcom.py
+++ b/fanficfare/adapters/adapter_artemisfowlcom.py
@@ -28,7 +28,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return ArtemisFowlComAdapter # XXX
diff --git a/fanficfare/adapters/adapter_asexstoriescom.py b/fanficfare/adapters/adapter_asexstoriescom.py
index a11728c3..3b1d0490 100644
--- a/fanficfare/adapters/adapter_asexstoriescom.py
+++ b/fanficfare/adapters/adapter_asexstoriescom.py
@@ -28,7 +28,7 @@ from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
import sys
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return ASexStoriesComAdapter
diff --git a/fanficfare/adapters/adapter_ashwindersycophanthexcom.py b/fanficfare/adapters/adapter_ashwindersycophanthexcom.py
index 48e03ddb..7cb8ff93 100644
--- a/fanficfare/adapters/adapter_ashwindersycophanthexcom.py
+++ b/fanficfare/adapters/adapter_ashwindersycophanthexcom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return AshwinderSycophantHexComAdapter
diff --git a/fanficfare/adapters/adapter_asianfanficscom.py b/fanficfare/adapters/adapter_asianfanficscom.py
index f7f7b920..a14818b3 100644
--- a/fanficfare/adapters/adapter_asianfanficscom.py
+++ b/fanficfare/adapters/adapter_asianfanficscom.py
@@ -9,7 +9,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return AsianFanFicsComAdapter
diff --git a/fanficfare/adapters/adapter_asr3slashzoneorg.py b/fanficfare/adapters/adapter_asr3slashzoneorg.py
index 8365266e..7c678559 100644
--- a/fanficfare/adapters/adapter_asr3slashzoneorg.py
+++ b/fanficfare/adapters/adapter_asr3slashzoneorg.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return Asr3SlashzoneOrgAdapter
diff --git a/fanficfare/adapters/adapter_bdsmlibrarycom.py b/fanficfare/adapters/adapter_bdsmlibrarycom.py
index a45d8324..305c6a51 100644
--- a/fanficfare/adapters/adapter_bdsmlibrarycom.py
+++ b/fanficfare/adapters/adapter_bdsmlibrarycom.py
@@ -59,7 +59,7 @@ from bs4 import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return BDSMLibraryComSiteAdapter
diff --git a/fanficfare/adapters/adapter_bloodshedversecom.py b/fanficfare/adapters/adapter_bloodshedversecom.py
index b040eba6..4c3cb87a 100644
--- a/fanficfare/adapters/adapter_bloodshedversecom.py
+++ b/fanficfare/adapters/adapter_bloodshedversecom.py
@@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)
from bs4 import BeautifulSoup
from ..htmlcleanup import stripHTML
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions
diff --git a/fanficfare/adapters/adapter_bloodtiesfancom.py b/fanficfare/adapters/adapter_bloodtiesfancom.py
index e8bdf286..c8f35a42 100644
--- a/fanficfare/adapters/adapter_bloodtiesfancom.py
+++ b/fanficfare/adapters/adapter_bloodtiesfancom.py
@@ -26,7 +26,7 @@ from bs4.element import Tag
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
# By virtue of being recent and requiring both is_adult and user/pass,
# adapter_fanficcastletvnet.py is the best choice for learning to
diff --git a/fanficfare/adapters/adapter_buffygilescom.py b/fanficfare/adapters/adapter_buffygilescom.py
index a9b1d0c5..f1eb696b 100644
--- a/fanficfare/adapters/adapter_buffygilescom.py
+++ b/fanficfare/adapters/adapter_buffygilescom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return BuffyGilesComAdapter
diff --git a/fanficfare/adapters/adapter_chaossycophanthexcom.py b/fanficfare/adapters/adapter_chaossycophanthexcom.py
index dc1dac76..448ae1a8 100644
--- a/fanficfare/adapters/adapter_chaossycophanthexcom.py
+++ b/fanficfare/adapters/adapter_chaossycophanthexcom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return ChaosSycophantHexComAdapter
diff --git a/fanficfare/adapters/adapter_chosentwofanficcom.py b/fanficfare/adapters/adapter_chosentwofanficcom.py
index 862896f9..f7e0b984 100644
--- a/fanficfare/adapters/adapter_chosentwofanficcom.py
+++ b/fanficfare/adapters/adapter_chosentwofanficcom.py
@@ -27,7 +27,7 @@ from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return ChosenTwoFanFicArchiveAdapter
diff --git a/fanficfare/adapters/adapter_csiforensicscom.py b/fanficfare/adapters/adapter_csiforensicscom.py
index e5f8057a..2358ed8b 100644
--- a/fanficfare/adapters/adapter_csiforensicscom.py
+++ b/fanficfare/adapters/adapter_csiforensicscom.py
@@ -25,7 +25,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
diff --git a/fanficfare/adapters/adapter_darksolaceorg.py b/fanficfare/adapters/adapter_darksolaceorg.py
index fd31765c..173b3548 100644
--- a/fanficfare/adapters/adapter_darksolaceorg.py
+++ b/fanficfare/adapters/adapter_darksolaceorg.py
@@ -19,7 +19,7 @@ from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class DarkSolaceOrgAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_deandamagecom.py b/fanficfare/adapters/adapter_deandamagecom.py
index 7c3780a1..9f0c1446 100644
--- a/fanficfare/adapters/adapter_deandamagecom.py
+++ b/fanficfare/adapters/adapter_deandamagecom.py
@@ -19,7 +19,7 @@ from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class DeanDamageComSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_deepinmysoulnet.py b/fanficfare/adapters/adapter_deepinmysoulnet.py
index 81eae44a..b690ae94 100644
--- a/fanficfare/adapters/adapter_deepinmysoulnet.py
+++ b/fanficfare/adapters/adapter_deepinmysoulnet.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return DeepInMySoulNetAdapter ## XXX
diff --git a/fanficfare/adapters/adapter_destinysgatewaycom.py b/fanficfare/adapters/adapter_destinysgatewaycom.py
index 39e6426c..db5f4cec 100644
--- a/fanficfare/adapters/adapter_destinysgatewaycom.py
+++ b/fanficfare/adapters/adapter_destinysgatewaycom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return DestinysGatewayComAdapter
diff --git a/fanficfare/adapters/adapter_dokugacom.py b/fanficfare/adapters/adapter_dokugacom.py
index bba8a5f5..dc19fe5c 100644
--- a/fanficfare/adapters/adapter_dokugacom.py
+++ b/fanficfare/adapters/adapter_dokugacom.py
@@ -25,7 +25,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return DokugaComAdapter
diff --git a/fanficfare/adapters/adapter_dracoandginnycom.py b/fanficfare/adapters/adapter_dracoandginnycom.py
index 49433492..2876faf8 100644
--- a/fanficfare/adapters/adapter_dracoandginnycom.py
+++ b/fanficfare/adapters/adapter_dracoandginnycom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return DracoAndGinnyComAdapter
diff --git a/fanficfare/adapters/adapter_dramioneorg.py b/fanficfare/adapters/adapter_dramioneorg.py
index 4d622df3..c626115f 100644
--- a/fanficfare/adapters/adapter_dramioneorg.py
+++ b/fanficfare/adapters/adapter_dramioneorg.py
@@ -26,7 +26,7 @@ from bs4.element import Tag
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return DramioneOrgAdapter
diff --git a/fanficfare/adapters/adapter_efictionestelielde.py b/fanficfare/adapters/adapter_efictionestelielde.py
index 6b669798..66a0fce9 100644
--- a/fanficfare/adapters/adapter_efictionestelielde.py
+++ b/fanficfare/adapters/adapter_efictionestelielde.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return EfictionEstelielDeAdapter
diff --git a/fanficfare/adapters/adapter_efpfanficnet.py b/fanficfare/adapters/adapter_efpfanficnet.py
index da2bc1ec..962f3d7f 100644
--- a/fanficfare/adapters/adapter_efpfanficnet.py
+++ b/fanficfare/adapters/adapter_efpfanficnet.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return EFPFanFicNet
diff --git a/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py b/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py
index 05d0d5de..cd40a9fb 100644
--- a/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py
+++ b/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return ErosnSapphoSycophantHexComAdapter
diff --git a/fanficfare/adapters/adapter_fanficauthorsnet.py b/fanficfare/adapters/adapter_fanficauthorsnet.py
index 2cbfb20e..1bfe586c 100644
--- a/fanficfare/adapters/adapter_fanficauthorsnet.py
+++ b/fanficfare/adapters/adapter_fanficauthorsnet.py
@@ -30,7 +30,7 @@ from bs4 import UnicodeDammit, Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
####################################################################################################
def getClass():
diff --git a/fanficfare/adapters/adapter_fanficcastletvnet.py b/fanficfare/adapters/adapter_fanficcastletvnet.py
index 0d37dd2d..61d8f0a2 100644
--- a/fanficfare/adapters/adapter_fanficcastletvnet.py
+++ b/fanficfare/adapters/adapter_fanficcastletvnet.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
# In general an 'adapter' needs to do these five things:
diff --git a/fanficfare/adapters/adapter_fanfichu.py b/fanficfare/adapters/adapter_fanfichu.py
index 59f702d7..3e59d121 100644
--- a/fanficfare/adapters/adapter_fanfichu.py
+++ b/fanficfare/adapters/adapter_fanfichu.py
@@ -20,7 +20,7 @@ import re
import urllib2
import urlparse
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions
diff --git a/fanficfare/adapters/adapter_fanfictionjunkiesde.py b/fanficfare/adapters/adapter_fanfictionjunkiesde.py
index c6694b1d..523ab771 100644
--- a/fanficfare/adapters/adapter_fanfictionjunkiesde.py
+++ b/fanficfare/adapters/adapter_fanfictionjunkiesde.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
# By virtue of being recent and requiring both is_adult and user/pass,
# adapter_fanficcastletvnet.py is the best choice for learning to
diff --git a/fanficfare/adapters/adapter_fanfiktionde.py b/fanficfare/adapters/adapter_fanfiktionde.py
index ad59a421..632185ba 100644
--- a/fanficfare/adapters/adapter_fanfiktionde.py
+++ b/fanficfare/adapters/adapter_fanfiktionde.py
@@ -28,7 +28,7 @@ from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return FanFiktionDeAdapter
diff --git a/fanficfare/adapters/adapter_fannation.py b/fanficfare/adapters/adapter_fannation.py
index 7a06bb26..6a74ac0f 100644
--- a/fanficfare/adapters/adapter_fannation.py
+++ b/fanficfare/adapters/adapter_fannation.py
@@ -18,7 +18,7 @@
# Software: eFiction
from __future__ import absolute_import
import re
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class FanNationAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_fhsarchivecom.py b/fanficfare/adapters/adapter_fhsarchivecom.py
index 27479da4..a9b0c809 100644
--- a/fanficfare/adapters/adapter_fhsarchivecom.py
+++ b/fanficfare/adapters/adapter_fhsarchivecom.py
@@ -18,7 +18,7 @@
# Software: eFiction
from __future__ import absolute_import
import re
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class FHSArchiveComAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_ficbooknet.py b/fanficfare/adapters/adapter_ficbooknet.py
index a8f74742..e4000552 100644
--- a/fanficfare/adapters/adapter_ficbooknet.py
+++ b/fanficfare/adapters/adapter_ficbooknet.py
@@ -27,7 +27,7 @@ from .. import translit
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
diff --git a/fanficfare/adapters/adapter_fictionalleyorg.py b/fanficfare/adapters/adapter_fictionalleyorg.py
index a8c35f03..ba6f32d4 100644
--- a/fanficfare/adapters/adapter_fictionalleyorg.py
+++ b/fanficfare/adapters/adapter_fictionalleyorg.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class FictionAlleyOrgSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_fictionhuntcom.py b/fanficfare/adapters/adapter_fictionhuntcom.py
index 17ed001e..98dcbe03 100644
--- a/fanficfare/adapters/adapter_fictionhuntcom.py
+++ b/fanficfare/adapters/adapter_fictionhuntcom.py
@@ -24,7 +24,7 @@ import urllib2
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class FictionHuntComSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_fictionmaniatv.py b/fanficfare/adapters/adapter_fictionmaniatv.py
index 2506e8a3..a79e7685 100644
--- a/fanficfare/adapters/adapter_fictionmaniatv.py
+++ b/fanficfare/adapters/adapter_fictionmaniatv.py
@@ -3,7 +3,7 @@ import re
import urllib2
import urlparse
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
diff --git a/fanficfare/adapters/adapter_fictionpadcom.py b/fanficfare/adapters/adapter_fictionpadcom.py
index 5cebbc50..a0066082 100644
--- a/fanficfare/adapters/adapter_fictionpadcom.py
+++ b/fanficfare/adapters/adapter_fictionpadcom.py
@@ -26,7 +26,7 @@ import json
#from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class FictionPadSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_ficwadcom.py b/fanficfare/adapters/adapter_ficwadcom.py
index ba0fc21e..725584da 100644
--- a/fanficfare/adapters/adapter_ficwadcom.py
+++ b/fanficfare/adapters/adapter_ficwadcom.py
@@ -25,7 +25,7 @@ import httplib, urllib
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class FicwadComSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_fimfictionnet.py b/fanficfare/adapters/adapter_fimfictionnet.py
index 03f0c9ec..88c4000a 100644
--- a/fanficfare/adapters/adapter_fimfictionnet.py
+++ b/fanficfare/adapters/adapter_fimfictionnet.py
@@ -28,7 +28,7 @@ import json
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return FimFictionNetSiteAdapter
diff --git a/fanficfare/adapters/adapter_fireflyfansnet.py b/fanficfare/adapters/adapter_fireflyfansnet.py
index 99550358..27ae6b44 100644
--- a/fanficfare/adapters/adapter_fireflyfansnet.py
+++ b/fanficfare/adapters/adapter_fireflyfansnet.py
@@ -24,7 +24,7 @@ import re
import sys
import urllib2
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
diff --git a/fanficfare/adapters/adapter_fireflypopulliorg.py b/fanficfare/adapters/adapter_fireflypopulliorg.py
index 97f64aa1..48b45cc8 100644
--- a/fanficfare/adapters/adapter_fireflypopulliorg.py
+++ b/fanficfare/adapters/adapter_fireflypopulliorg.py
@@ -28,7 +28,7 @@ import re
import urllib2
import sys
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
diff --git a/fanficfare/adapters/adapter_forumquestionablequestingcom.py b/fanficfare/adapters/adapter_forumquestionablequestingcom.py
index bfeab8cf..d67b2627 100644
--- a/fanficfare/adapters/adapter_forumquestionablequestingcom.py
+++ b/fanficfare/adapters/adapter_forumquestionablequestingcom.py
@@ -19,7 +19,7 @@ from __future__ import absolute_import
import re
from ..htmlcleanup import stripHTML
-from base_xenforoforum_adapter import BaseXenForoForumAdapter
+from .base_xenforoforum_adapter import BaseXenForoForumAdapter
def getClass():
return QuestionablequestingComAdapter
diff --git a/fanficfare/adapters/adapter_forumssufficientvelocitycom.py b/fanficfare/adapters/adapter_forumssufficientvelocitycom.py
index 3222d1b0..c9b09b56 100644
--- a/fanficfare/adapters/adapter_forumssufficientvelocitycom.py
+++ b/fanficfare/adapters/adapter_forumssufficientvelocitycom.py
@@ -18,7 +18,7 @@
from __future__ import absolute_import
import re
-from base_xenforoforum_adapter import BaseXenForoForumAdapter
+from .base_xenforoforum_adapter import BaseXenForoForumAdapter
def getClass():
return ForumsSufficientVelocityComAdapter
diff --git a/fanficfare/adapters/adapter_gluttonyfictioncom.py b/fanficfare/adapters/adapter_gluttonyfictioncom.py
index 306abf92..b9ee9207 100644
--- a/fanficfare/adapters/adapter_gluttonyfictioncom.py
+++ b/fanficfare/adapters/adapter_gluttonyfictioncom.py
@@ -22,7 +22,7 @@
### Original was adapter_fannation.py
##################################################################################
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class GluttonyFictionComAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_gravitytalescom.py b/fanficfare/adapters/adapter_gravitytalescom.py
index 720eee64..c2551217 100644
--- a/fanficfare/adapters/adapter_gravitytalescom.py
+++ b/fanficfare/adapters/adapter_gravitytalescom.py
@@ -37,7 +37,7 @@ except ImportError:
# logger.warn('No version of feedparser module available, falling back to naive published and updated date')
feedparser = None
-from base_adapter import BaseSiteAdapter
+from .base_adapter import BaseSiteAdapter
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
diff --git a/fanficfare/adapters/adapter_harrypotterfanfictioncom.py b/fanficfare/adapters/adapter_harrypotterfanfictioncom.py
index 3b1d172f..d95133a1 100644
--- a/fanficfare/adapters/adapter_harrypotterfanfictioncom.py
+++ b/fanficfare/adapters/adapter_harrypotterfanfictioncom.py
@@ -25,7 +25,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class HarryPotterFanFictionComSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_hlfictionnet.py b/fanficfare/adapters/adapter_hlfictionnet.py
index 7c67da25..c1bc6338 100644
--- a/fanficfare/adapters/adapter_hlfictionnet.py
+++ b/fanficfare/adapters/adapter_hlfictionnet.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return HLFictionNetAdapter
diff --git a/fanficfare/adapters/adapter_hpfanficarchivecom.py b/fanficfare/adapters/adapter_hpfanficarchivecom.py
index 58a086a2..7820405e 100644
--- a/fanficfare/adapters/adapter_hpfanficarchivecom.py
+++ b/fanficfare/adapters/adapter_hpfanficarchivecom.py
@@ -26,7 +26,7 @@ from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return HPFanficArchiveComAdapter
diff --git a/fanficfare/adapters/adapter_iketernalnet.py b/fanficfare/adapters/adapter_iketernalnet.py
index be15c64d..7a2027c1 100644
--- a/fanficfare/adapters/adapter_iketernalnet.py
+++ b/fanficfare/adapters/adapter_iketernalnet.py
@@ -25,7 +25,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return IkEternalNetAdapter
diff --git a/fanficfare/adapters/adapter_imagineeficcom.py b/fanficfare/adapters/adapter_imagineeficcom.py
index 15c64028..e1ba1be2 100644
--- a/fanficfare/adapters/adapter_imagineeficcom.py
+++ b/fanficfare/adapters/adapter_imagineeficcom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return ImagineEFicComAdapter
diff --git a/fanficfare/adapters/adapter_imrightbehindyoucom.py b/fanficfare/adapters/adapter_imrightbehindyoucom.py
index 349c1d8b..48fbd3bc 100644
--- a/fanficfare/adapters/adapter_imrightbehindyoucom.py
+++ b/fanficfare/adapters/adapter_imrightbehindyoucom.py
@@ -19,7 +19,7 @@ from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class ImRightBehindYouComSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_inkbunnynet.py b/fanficfare/adapters/adapter_inkbunnynet.py
index ce30351e..6c4d718f 100644
--- a/fanficfare/adapters/adapter_inkbunnynet.py
+++ b/fanficfare/adapters/adapter_inkbunnynet.py
@@ -25,7 +25,7 @@ import sys
import urllib2
from datetime import datetime, timedelta
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
diff --git a/fanficfare/adapters/adapter_itcouldhappennet.py b/fanficfare/adapters/adapter_itcouldhappennet.py
index c8860e85..75d6171a 100644
--- a/fanficfare/adapters/adapter_itcouldhappennet.py
+++ b/fanficfare/adapters/adapter_itcouldhappennet.py
@@ -18,7 +18,7 @@
# Software: eFiction
from __future__ import absolute_import
import re
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class ItCouldHappenNetSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_kiarepositorymujajinet.py b/fanficfare/adapters/adapter_kiarepositorymujajinet.py
index d83a0c27..ec4bfbd1 100644
--- a/fanficfare/adapters/adapter_kiarepositorymujajinet.py
+++ b/fanficfare/adapters/adapter_kiarepositorymujajinet.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return KiaRepositoryMujajiNetAdapter ## XXX
diff --git a/fanficfare/adapters/adapter_ksarchivecom.py b/fanficfare/adapters/adapter_ksarchivecom.py
index 312a19b3..9837e9c6 100644
--- a/fanficfare/adapters/adapter_ksarchivecom.py
+++ b/fanficfare/adapters/adapter_ksarchivecom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
# Search for XXX comments--that's where things are most likely to need changing.
diff --git a/fanficfare/adapters/adapter_lcfanficcom.py b/fanficfare/adapters/adapter_lcfanficcom.py
index 8ccdf2b6..d0088687 100644
--- a/fanficfare/adapters/adapter_lcfanficcom.py
+++ b/fanficfare/adapters/adapter_lcfanficcom.py
@@ -26,7 +26,7 @@ import sys # ## used for debug purposes
import urllib2
import datetime
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
diff --git a/fanficfare/adapters/adapter_libraryofmoriacom.py b/fanficfare/adapters/adapter_libraryofmoriacom.py
index 21e0ee55..88d7a847 100644
--- a/fanficfare/adapters/adapter_libraryofmoriacom.py
+++ b/fanficfare/adapters/adapter_libraryofmoriacom.py
@@ -17,7 +17,7 @@
# Software: eFiction
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class LibraryOfMoriaComAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_lightnovelgatecom.py b/fanficfare/adapters/adapter_lightnovelgatecom.py
index 4489beae..e33d5f74 100644
--- a/fanficfare/adapters/adapter_lightnovelgatecom.py
+++ b/fanficfare/adapters/adapter_lightnovelgatecom.py
@@ -25,7 +25,7 @@ import re
import urllib2
import urlparse
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from bs4 import Comment
from ..htmlcleanup import removeEntities, stripHTML, fix_excess_space
diff --git a/fanficfare/adapters/adapter_literotica.py b/fanficfare/adapters/adapter_literotica.py
index 537bc288..0f9b9dd1 100644
--- a/fanficfare/adapters/adapter_literotica.py
+++ b/fanficfare/adapters/adapter_literotica.py
@@ -26,7 +26,7 @@ from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class LiteroticaSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_looselugscom.py b/fanficfare/adapters/adapter_looselugscom.py
index cd8b3302..72ef3709 100644
--- a/fanficfare/adapters/adapter_looselugscom.py
+++ b/fanficfare/adapters/adapter_looselugscom.py
@@ -22,7 +22,7 @@
### Original was adapter_fannation.py
##################################################################################
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class LooseLugsComAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_lotrfanfictioncom.py b/fanficfare/adapters/adapter_lotrfanfictioncom.py
index e9f04dc3..6be94d75 100644
--- a/fanficfare/adapters/adapter_lotrfanfictioncom.py
+++ b/fanficfare/adapters/adapter_lotrfanfictioncom.py
@@ -17,7 +17,7 @@
# Software: eFiction
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class TheLOTRFanFictionSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_lotrgficcom.py b/fanficfare/adapters/adapter_lotrgficcom.py
index b93c73f8..362824bb 100644
--- a/fanficfare/adapters/adapter_lotrgficcom.py
+++ b/fanficfare/adapters/adapter_lotrgficcom.py
@@ -28,7 +28,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class LOTRgficComAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_lumossycophanthexcom.py b/fanficfare/adapters/adapter_lumossycophanthexcom.py
index a3bf1c13..f8c59acc 100644
--- a/fanficfare/adapters/adapter_lumossycophanthexcom.py
+++ b/fanficfare/adapters/adapter_lumossycophanthexcom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return LumosSycophantHexComAdapter
diff --git a/fanficfare/adapters/adapter_masseffect2in.py b/fanficfare/adapters/adapter_masseffect2in.py
index 338fe488..1facf33e 100644
--- a/fanficfare/adapters/adapter_masseffect2in.py
+++ b/fanficfare/adapters/adapter_masseffect2in.py
@@ -24,7 +24,7 @@ import urllib2
from ..htmlcleanup import removeEntities, stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
_logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_mcstoriescom.py b/fanficfare/adapters/adapter_mcstoriescom.py
index bec4a240..289758cc 100644
--- a/fanficfare/adapters/adapter_mcstoriescom.py
+++ b/fanficfare/adapters/adapter_mcstoriescom.py
@@ -27,7 +27,7 @@ from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class MCStoriesComSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_mediaminerorg.py b/fanficfare/adapters/adapter_mediaminerorg.py
index 09de475d..eeee32c4 100644
--- a/fanficfare/adapters/adapter_mediaminerorg.py
+++ b/fanficfare/adapters/adapter_mediaminerorg.py
@@ -25,7 +25,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class MediaMinerOrgSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_merlinficdtwinscouk.py b/fanficfare/adapters/adapter_merlinficdtwinscouk.py
index 39b98ad5..7345eeae 100644
--- a/fanficfare/adapters/adapter_merlinficdtwinscouk.py
+++ b/fanficfare/adapters/adapter_merlinficdtwinscouk.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return MerlinFicDtwinsCoUk
diff --git a/fanficfare/adapters/adapter_midnightwhispers.py b/fanficfare/adapters/adapter_midnightwhispers.py
index 27dbe46f..7f818744 100644
--- a/fanficfare/adapters/adapter_midnightwhispers.py
+++ b/fanficfare/adapters/adapter_midnightwhispers.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
# Search for XXX comments--that's where things are most likely to need changing.
diff --git a/fanficfare/adapters/adapter_mttjustoncenet.py b/fanficfare/adapters/adapter_mttjustoncenet.py
index 097b6a04..ace5dd19 100644
--- a/fanficfare/adapters/adapter_mttjustoncenet.py
+++ b/fanficfare/adapters/adapter_mttjustoncenet.py
@@ -19,7 +19,7 @@ from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class MTTJustOnceNetSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_naiceanilmenet.py b/fanficfare/adapters/adapter_naiceanilmenet.py
index a7a913fd..d6ccc27e 100644
--- a/fanficfare/adapters/adapter_naiceanilmenet.py
+++ b/fanficfare/adapters/adapter_naiceanilmenet.py
@@ -17,7 +17,7 @@
# Software: eFiction
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class NaiceaNilmeNetAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_narutoficorg.py b/fanficfare/adapters/adapter_narutoficorg.py
index 09441d5f..e65a7666 100644
--- a/fanficfare/adapters/adapter_narutoficorg.py
+++ b/fanficfare/adapters/adapter_narutoficorg.py
@@ -19,7 +19,7 @@ from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class NarutoFicOrgSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_nationallibrarynet.py b/fanficfare/adapters/adapter_nationallibrarynet.py
index 8c55f16e..61c77349 100644
--- a/fanficfare/adapters/adapter_nationallibrarynet.py
+++ b/fanficfare/adapters/adapter_nationallibrarynet.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return NationalLibraryNetAdapter
diff --git a/fanficfare/adapters/adapter_ncisficcom.py b/fanficfare/adapters/adapter_ncisficcom.py
index c8f9953c..7f378448 100644
--- a/fanficfare/adapters/adapter_ncisficcom.py
+++ b/fanficfare/adapters/adapter_ncisficcom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return NCISFicComAdapter
diff --git a/fanficfare/adapters/adapter_ncisfictioncom.py b/fanficfare/adapters/adapter_ncisfictioncom.py
index 8b989449..03746e5b 100644
--- a/fanficfare/adapters/adapter_ncisfictioncom.py
+++ b/fanficfare/adapters/adapter_ncisfictioncom.py
@@ -17,7 +17,7 @@
# Software: eFiction
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class NCISFictionComAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_nfacommunitycom.py b/fanficfare/adapters/adapter_nfacommunitycom.py
index 7371e6eb..f860c6a0 100644
--- a/fanficfare/adapters/adapter_nfacommunitycom.py
+++ b/fanficfare/adapters/adapter_nfacommunitycom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
# Search for XXX comments--that's where things are most likely to need changing.
diff --git a/fanficfare/adapters/adapter_nhamagicalworldsus.py b/fanficfare/adapters/adapter_nhamagicalworldsus.py
index 214dc371..9b74d3e1 100644
--- a/fanficfare/adapters/adapter_nhamagicalworldsus.py
+++ b/fanficfare/adapters/adapter_nhamagicalworldsus.py
@@ -17,7 +17,7 @@
# Software: eFiction
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
# Class name has to be unique. Our convention is camel case the
# sitename with Adapter at the end. www is skipped.
diff --git a/fanficfare/adapters/adapter_ninelivesarchivecom.py b/fanficfare/adapters/adapter_ninelivesarchivecom.py
index 13e052cc..e0d6eacc 100644
--- a/fanficfare/adapters/adapter_ninelivesarchivecom.py
+++ b/fanficfare/adapters/adapter_ninelivesarchivecom.py
@@ -18,7 +18,7 @@
# Software: eFiction
from __future__ import absolute_import
import re
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class NineLivesAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_nocturnallightnet.py b/fanficfare/adapters/adapter_nocturnallightnet.py
index 95edc0a3..65612154 100644
--- a/fanficfare/adapters/adapter_nocturnallightnet.py
+++ b/fanficfare/adapters/adapter_nocturnallightnet.py
@@ -7,7 +7,7 @@ import urlparse
from bs4.element import Tag
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions
diff --git a/fanficfare/adapters/adapter_noveltrovecom.py b/fanficfare/adapters/adapter_noveltrovecom.py
index 2eb9f5c7..5948674e 100644
--- a/fanficfare/adapters/adapter_noveltrovecom.py
+++ b/fanficfare/adapters/adapter_noveltrovecom.py
@@ -26,7 +26,7 @@ import sys # ## used for debug purposes
import urllib2
import datetime
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
diff --git a/fanficfare/adapters/adapter_occlumencysycophanthexcom.py b/fanficfare/adapters/adapter_occlumencysycophanthexcom.py
index e8e4b6cd..7e321405 100644
--- a/fanficfare/adapters/adapter_occlumencysycophanthexcom.py
+++ b/fanficfare/adapters/adapter_occlumencysycophanthexcom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return OcclumencySycophantHexComAdapter
diff --git a/fanficfare/adapters/adapter_phoenixsongnet.py b/fanficfare/adapters/adapter_phoenixsongnet.py
index 5e5fd1eb..d84c1146 100644
--- a/fanficfare/adapters/adapter_phoenixsongnet.py
+++ b/fanficfare/adapters/adapter_phoenixsongnet.py
@@ -25,7 +25,7 @@ import urllib2, urllib, cookielib
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return PhoenixSongNetAdapter
diff --git a/fanficfare/adapters/adapter_ponyfictionarchivenet.py b/fanficfare/adapters/adapter_ponyfictionarchivenet.py
index 39047c1f..8a6d5028 100644
--- a/fanficfare/adapters/adapter_ponyfictionarchivenet.py
+++ b/fanficfare/adapters/adapter_ponyfictionarchivenet.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return PonyFictionArchiveNetAdapter
diff --git a/fanficfare/adapters/adapter_potionsandsnitches.py b/fanficfare/adapters/adapter_potionsandsnitches.py
index f7db6b91..142b2bdf 100644
--- a/fanficfare/adapters/adapter_potionsandsnitches.py
+++ b/fanficfare/adapters/adapter_potionsandsnitches.py
@@ -27,7 +27,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class PotionsAndSnitchesOrgSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_potterficscom.py b/fanficfare/adapters/adapter_potterficscom.py
index 654ee548..c7323227 100644
--- a/fanficfare/adapters/adapter_potterficscom.py
+++ b/fanficfare/adapters/adapter_potterficscom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter
+from .base_adapter import BaseSiteAdapter
# This function is called by the downloader in all adapter_*.py files
# in this dir to register the adapter class. So it needs to be
diff --git a/fanficfare/adapters/adapter_potterheadsanonymouscom.py b/fanficfare/adapters/adapter_potterheadsanonymouscom.py
index 13556e8e..5aad67b5 100644
--- a/fanficfare/adapters/adapter_potterheadsanonymouscom.py
+++ b/fanficfare/adapters/adapter_potterheadsanonymouscom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return PotterHeadsAnonymousComAdapter
diff --git a/fanficfare/adapters/adapter_pretendercentrecom.py b/fanficfare/adapters/adapter_pretendercentrecom.py
index 640cb2f6..c6bc603d 100644
--- a/fanficfare/adapters/adapter_pretendercentrecom.py
+++ b/fanficfare/adapters/adapter_pretendercentrecom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return PretenderCenterComAdapter
diff --git a/fanficfare/adapters/adapter_qafficcom.py b/fanficfare/adapters/adapter_qafficcom.py
index c72d4a1b..4bea9eeb 100644
--- a/fanficfare/adapters/adapter_qafficcom.py
+++ b/fanficfare/adapters/adapter_qafficcom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return QafFicComAdapter
diff --git a/fanficfare/adapters/adapter_quotevcom.py b/fanficfare/adapters/adapter_quotevcom.py
index 6fd27b63..588a5ea5 100644
--- a/fanficfare/adapters/adapter_quotevcom.py
+++ b/fanficfare/adapters/adapter_quotevcom.py
@@ -7,7 +7,7 @@ import urllib2
import datetime
from .. import exceptions
-from base_adapter import BaseSiteAdapter
+from .base_adapter import BaseSiteAdapter
from ..htmlcleanup import stripHTML
SITE_DOMAIN = 'quotev.com'
diff --git a/fanficfare/adapters/adapter_royalroadl.py b/fanficfare/adapters/adapter_royalroadl.py
index 5afd0747..44b6cfd9 100644
--- a/fanficfare/adapters/adapter_royalroadl.py
+++ b/fanficfare/adapters/adapter_royalroadl.py
@@ -26,7 +26,7 @@ import urllib2
from .. import exceptions as exceptions
from ..dateutils import parse_relative_date_string
from ..htmlcleanup import stripHTML
-from base_adapter import BaseSiteAdapter
+from .base_adapter import BaseSiteAdapter
logger = logging.getLogger(__name__)
diff --git a/fanficfare/adapters/adapter_samandjacknet.py b/fanficfare/adapters/adapter_samandjacknet.py
index 94d1b55d..372a7a29 100644
--- a/fanficfare/adapters/adapter_samandjacknet.py
+++ b/fanficfare/adapters/adapter_samandjacknet.py
@@ -25,7 +25,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
# By virtue of being recent and requiring both is_adult and user/pass,
# adapter_fanficcastletvnet.py is the best choice for learning to
diff --git a/fanficfare/adapters/adapter_scarvesandcoffeenet.py b/fanficfare/adapters/adapter_scarvesandcoffeenet.py
index 3382594b..0b195f4c 100644
--- a/fanficfare/adapters/adapter_scarvesandcoffeenet.py
+++ b/fanficfare/adapters/adapter_scarvesandcoffeenet.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return ScarvesAndCoffeeNetAdapter
diff --git a/fanficfare/adapters/adapter_sebklainenet.py b/fanficfare/adapters/adapter_sebklainenet.py
index 8cbb8baf..05d05ae8 100644
--- a/fanficfare/adapters/adapter_sebklainenet.py
+++ b/fanficfare/adapters/adapter_sebklainenet.py
@@ -20,7 +20,7 @@
# Software: eFiction
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class SebklaineNeteOrgSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_sheppardweircom.py b/fanficfare/adapters/adapter_sheppardweircom.py
index 7c4b0131..f8eee2c9 100644
--- a/fanficfare/adapters/adapter_sheppardweircom.py
+++ b/fanficfare/adapters/adapter_sheppardweircom.py
@@ -25,7 +25,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
# By virtue of being recent and requiring both is_adult and user/pass,
# adapter_fanficcastletvnet.py is the best choice for learning to
diff --git a/fanficfare/adapters/adapter_shriftweborgbfa.py b/fanficfare/adapters/adapter_shriftweborgbfa.py
index 6d53b733..d6fdbe77 100644
--- a/fanficfare/adapters/adapter_shriftweborgbfa.py
+++ b/fanficfare/adapters/adapter_shriftweborgbfa.py
@@ -28,7 +28,7 @@ import re
import urllib2
import sys
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
diff --git a/fanficfare/adapters/adapter_sinfuldreamscomunicornfic.py b/fanficfare/adapters/adapter_sinfuldreamscomunicornfic.py
index 9cd84ad9..f69d31e8 100644
--- a/fanficfare/adapters/adapter_sinfuldreamscomunicornfic.py
+++ b/fanficfare/adapters/adapter_sinfuldreamscomunicornfic.py
@@ -17,7 +17,7 @@
# Software: eFiction
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class SinfulDreamsComUnicornFic(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_sinfuldreamscomwhisperedmuse.py b/fanficfare/adapters/adapter_sinfuldreamscomwhisperedmuse.py
index 143415ee..c8d868eb 100644
--- a/fanficfare/adapters/adapter_sinfuldreamscomwhisperedmuse.py
+++ b/fanficfare/adapters/adapter_sinfuldreamscomwhisperedmuse.py
@@ -17,7 +17,7 @@
# Software: eFiction
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class SinfulDreamsComWhisperedMuse(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_sinfuldreamscomwickedtemptation.py b/fanficfare/adapters/adapter_sinfuldreamscomwickedtemptation.py
index ca3bffb7..38ea9e04 100644
--- a/fanficfare/adapters/adapter_sinfuldreamscomwickedtemptation.py
+++ b/fanficfare/adapters/adapter_sinfuldreamscomwickedtemptation.py
@@ -17,7 +17,7 @@
# Software: eFiction
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class SinfulDreamsComWickedTemptation(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_siyecouk.py b/fanficfare/adapters/adapter_siyecouk.py
index 70d4032f..34914c05 100644
--- a/fanficfare/adapters/adapter_siyecouk.py
+++ b/fanficfare/adapters/adapter_siyecouk.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
# This function is called by the downloader in all adapter_*.py files
# in this dir to register the adapter class. So it needs to be
diff --git a/fanficfare/adapters/adapter_spikeluvercom.py b/fanficfare/adapters/adapter_spikeluvercom.py
index c68339ae..6150644f 100644
--- a/fanficfare/adapters/adapter_spikeluvercom.py
+++ b/fanficfare/adapters/adapter_spikeluvercom.py
@@ -7,7 +7,7 @@ import urlparse
from bs4.element import Tag
from ..htmlcleanup import stripHTML
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions
diff --git a/fanficfare/adapters/adapter_squidgeorgpeja.py b/fanficfare/adapters/adapter_squidgeorgpeja.py
index 4ad0c3d3..8e703f2f 100644
--- a/fanficfare/adapters/adapter_squidgeorgpeja.py
+++ b/fanficfare/adapters/adapter_squidgeorgpeja.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
diff --git a/fanficfare/adapters/adapter_starskyhutcharchivenet.py b/fanficfare/adapters/adapter_starskyhutcharchivenet.py
index f69194fc..331978a4 100644
--- a/fanficfare/adapters/adapter_starskyhutcharchivenet.py
+++ b/fanficfare/adapters/adapter_starskyhutcharchivenet.py
@@ -19,7 +19,7 @@ from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class StarskyHutchArchiveNetSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_starslibrarynet.py b/fanficfare/adapters/adapter_starslibrarynet.py
index 30c7ec78..fa060ae0 100644
--- a/fanficfare/adapters/adapter_starslibrarynet.py
+++ b/fanficfare/adapters/adapter_starslibrarynet.py
@@ -19,7 +19,7 @@ from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class StarsLibraryNetAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_storiesofardacom.py b/fanficfare/adapters/adapter_storiesofardacom.py
index 2cd1dd15..8154191b 100644
--- a/fanficfare/adapters/adapter_storiesofardacom.py
+++ b/fanficfare/adapters/adapter_storiesofardacom.py
@@ -25,7 +25,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return StoriesOfArdaComAdapter
diff --git a/fanficfare/adapters/adapter_storiesonlinenet.py b/fanficfare/adapters/adapter_storiesonlinenet.py
index b770f282..4ada3b52 100644
--- a/fanficfare/adapters/adapter_storiesonlinenet.py
+++ b/fanficfare/adapters/adapter_storiesonlinenet.py
@@ -25,7 +25,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return StoriesOnlineNetAdapter
diff --git a/fanficfare/adapters/adapter_sugarquillnet.py b/fanficfare/adapters/adapter_sugarquillnet.py
index 32e04b0c..739f92bf 100644
--- a/fanficfare/adapters/adapter_sugarquillnet.py
+++ b/fanficfare/adapters/adapter_sugarquillnet.py
@@ -37,7 +37,7 @@ from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return SugarQuillNetAdapter
diff --git a/fanficfare/adapters/adapter_swordborderlineangelcom.py b/fanficfare/adapters/adapter_swordborderlineangelcom.py
index 41d7a3c3..e58274bf 100644
--- a/fanficfare/adapters/adapter_swordborderlineangelcom.py
+++ b/fanficfare/adapters/adapter_swordborderlineangelcom.py
@@ -19,7 +19,7 @@
from __future__ import absolute_import
from ..htmlcleanup import stripHTML
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class SwordBorderlineAngelComSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_tasteofpoisoninkubationnet.py b/fanficfare/adapters/adapter_tasteofpoisoninkubationnet.py
index 14a3b020..0a6c717b 100644
--- a/fanficfare/adapters/adapter_tasteofpoisoninkubationnet.py
+++ b/fanficfare/adapters/adapter_tasteofpoisoninkubationnet.py
@@ -19,7 +19,7 @@ from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class TasteOfPoisonInkubationNetAdapter(BaseEfictionAdapter):
''' This adapter will download stories from the
diff --git a/fanficfare/adapters/adapter_tenhawkpresentscom.py b/fanficfare/adapters/adapter_tenhawkpresentscom.py
index 31049278..e9a5da95 100644
--- a/fanficfare/adapters/adapter_tenhawkpresentscom.py
+++ b/fanficfare/adapters/adapter_tenhawkpresentscom.py
@@ -27,7 +27,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class TenhawkPresentsComSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_tgstorytimecom.py b/fanficfare/adapters/adapter_tgstorytimecom.py
index 38e9e695..ddc950f9 100644
--- a/fanficfare/adapters/adapter_tgstorytimecom.py
+++ b/fanficfare/adapters/adapter_tgstorytimecom.py
@@ -17,7 +17,7 @@
# Software: eFiction
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class TGStorytimeComAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_thebrokenworldorg.py b/fanficfare/adapters/adapter_thebrokenworldorg.py
index a5614b4a..a32c350a 100644
--- a/fanficfare/adapters/adapter_thebrokenworldorg.py
+++ b/fanficfare/adapters/adapter_thebrokenworldorg.py
@@ -19,7 +19,7 @@ from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class TheBrokenWorldOrgSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_thedelphicexpansecom.py b/fanficfare/adapters/adapter_thedelphicexpansecom.py
index 6c4972b1..77a7ebc2 100644
--- a/fanficfare/adapters/adapter_thedelphicexpansecom.py
+++ b/fanficfare/adapters/adapter_thedelphicexpansecom.py
@@ -19,7 +19,7 @@ from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class TheDelphicExpanseComAdapter(BaseEfictionAdapter):
''' This adapter will download stories from the
diff --git a/fanficfare/adapters/adapter_thehookupzonenet.py b/fanficfare/adapters/adapter_thehookupzonenet.py
index 6aa7da74..30aca527 100644
--- a/fanficfare/adapters/adapter_thehookupzonenet.py
+++ b/fanficfare/adapters/adapter_thehookupzonenet.py
@@ -17,7 +17,7 @@
# Software: eFiction
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class TheHookupZoneNetAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_themaplebookshelf.py b/fanficfare/adapters/adapter_themaplebookshelf.py
index fdf58f22..05ea1f59 100644
--- a/fanficfare/adapters/adapter_themaplebookshelf.py
+++ b/fanficfare/adapters/adapter_themaplebookshelf.py
@@ -17,7 +17,7 @@
# Software: eFiction
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class TheMapleBookshelfComSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_themasquenet.py b/fanficfare/adapters/adapter_themasquenet.py
index 23e5ac4c..c914b975 100644
--- a/fanficfare/adapters/adapter_themasquenet.py
+++ b/fanficfare/adapters/adapter_themasquenet.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return TheMasqueNetAdapter
diff --git a/fanficfare/adapters/adapter_thepetulantpoetesscom.py b/fanficfare/adapters/adapter_thepetulantpoetesscom.py
index 834882f3..436eccbd 100644
--- a/fanficfare/adapters/adapter_thepetulantpoetesscom.py
+++ b/fanficfare/adapters/adapter_thepetulantpoetesscom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return ThePetulantPoetessComAdapter
diff --git a/fanficfare/adapters/adapter_thundercatsfansorg.py b/fanficfare/adapters/adapter_thundercatsfansorg.py
index b1157dbe..05beaf1e 100644
--- a/fanficfare/adapters/adapter_thundercatsfansorg.py
+++ b/fanficfare/adapters/adapter_thundercatsfansorg.py
@@ -19,7 +19,7 @@ from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class ThundercatsFansOrgSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_tolkienfanfiction.py b/fanficfare/adapters/adapter_tolkienfanfiction.py
index ba79b2e5..2455b1ad 100644
--- a/fanficfare/adapters/adapter_tolkienfanfiction.py
+++ b/fanficfare/adapters/adapter_tolkienfanfiction.py
@@ -68,7 +68,7 @@ from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def _is_story_url(url):
return "Story_Read_Head.php" in url
diff --git a/fanficfare/adapters/adapter_tomparisdormcom.py b/fanficfare/adapters/adapter_tomparisdormcom.py
index e5b03133..fd3a584b 100644
--- a/fanficfare/adapters/adapter_tomparisdormcom.py
+++ b/fanficfare/adapters/adapter_tomparisdormcom.py
@@ -27,7 +27,7 @@ from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return TomParisDormComAdapter
diff --git a/fanficfare/adapters/adapter_trekfanfictionnet.py b/fanficfare/adapters/adapter_trekfanfictionnet.py
index aec0bb77..e92fbc63 100644
--- a/fanficfare/adapters/adapter_trekfanfictionnet.py
+++ b/fanficfare/adapters/adapter_trekfanfictionnet.py
@@ -30,7 +30,7 @@ import logging
import re
import urllib2
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
diff --git a/fanficfare/adapters/adapter_trekiverseorg.py b/fanficfare/adapters/adapter_trekiverseorg.py
index fd9ccf46..e13b6b69 100644
--- a/fanficfare/adapters/adapter_trekiverseorg.py
+++ b/fanficfare/adapters/adapter_trekiverseorg.py
@@ -25,7 +25,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return TrekiverseOrgAdapter
diff --git a/fanficfare/adapters/adapter_tthfanficorg.py b/fanficfare/adapters/adapter_tthfanficorg.py
index 0af384be..c7d07f6c 100644
--- a/fanficfare/adapters/adapter_tthfanficorg.py
+++ b/fanficfare/adapters/adapter_tthfanficorg.py
@@ -23,7 +23,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class TwistingTheHellmouthSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_twilightarchivescom.py b/fanficfare/adapters/adapter_twilightarchivescom.py
index f322f85a..80ff8cb3 100644
--- a/fanficfare/adapters/adapter_twilightarchivescom.py
+++ b/fanficfare/adapters/adapter_twilightarchivescom.py
@@ -25,7 +25,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return TwilightArchivesComAdapter
diff --git a/fanficfare/adapters/adapter_twilightednet.py b/fanficfare/adapters/adapter_twilightednet.py
index a2cab28b..e0803659 100644
--- a/fanficfare/adapters/adapter_twilightednet.py
+++ b/fanficfare/adapters/adapter_twilightednet.py
@@ -27,7 +27,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class TwilightedNetSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_unknowableroomorg.py b/fanficfare/adapters/adapter_unknowableroomorg.py
index 487422a0..4fb51199 100644
--- a/fanficfare/adapters/adapter_unknowableroomorg.py
+++ b/fanficfare/adapters/adapter_unknowableroomorg.py
@@ -24,7 +24,7 @@ import re
import urllib2
import sys
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
diff --git a/fanficfare/adapters/adapter_valentchambercom.py b/fanficfare/adapters/adapter_valentchambercom.py
index 4f20227a..0d067759 100644
--- a/fanficfare/adapters/adapter_valentchambercom.py
+++ b/fanficfare/adapters/adapter_valentchambercom.py
@@ -22,7 +22,7 @@
### Original was adapter_fannation.py
##################################################################################
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class ValentChamberComAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_voracity2eficcom.py b/fanficfare/adapters/adapter_voracity2eficcom.py
index a8dcee63..e1108ee9 100644
--- a/fanficfare/adapters/adapter_voracity2eficcom.py
+++ b/fanficfare/adapters/adapter_voracity2eficcom.py
@@ -6,7 +6,7 @@ import urlparse
from bs4.element import Tag
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions
diff --git a/fanficfare/adapters/adapter_walkingtheplankorg.py b/fanficfare/adapters/adapter_walkingtheplankorg.py
index d1e05c40..930d9920 100644
--- a/fanficfare/adapters/adapter_walkingtheplankorg.py
+++ b/fanficfare/adapters/adapter_walkingtheplankorg.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return WalkingThePlankOrgAdapter
diff --git a/fanficfare/adapters/adapter_webnovelcom.py b/fanficfare/adapters/adapter_webnovelcom.py
index 0f43b590..38e61944 100644
--- a/fanficfare/adapters/adapter_webnovelcom.py
+++ b/fanficfare/adapters/adapter_webnovelcom.py
@@ -25,7 +25,7 @@ import re
import time
import urllib2
-from base_adapter import BaseSiteAdapter
+from .base_adapter import BaseSiteAdapter
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
from ..dateutils import parse_relative_date_string
diff --git a/fanficfare/adapters/adapter_whoficcom.py b/fanficfare/adapters/adapter_whoficcom.py
index 7d83e015..517c73ed 100644
--- a/fanficfare/adapters/adapter_whoficcom.py
+++ b/fanficfare/adapters/adapter_whoficcom.py
@@ -26,7 +26,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
class WhoficComSiteAdapter(BaseSiteAdapter):
diff --git a/fanficfare/adapters/adapter_wolverineandroguecom.py b/fanficfare/adapters/adapter_wolverineandroguecom.py
index 1cc5d999..f2733bf5 100644
--- a/fanficfare/adapters/adapter_wolverineandroguecom.py
+++ b/fanficfare/adapters/adapter_wolverineandroguecom.py
@@ -25,7 +25,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return WolverineAndRogueComAdapter
diff --git a/fanficfare/adapters/adapter_wraithbaitcom.py b/fanficfare/adapters/adapter_wraithbaitcom.py
index 2d6d1b93..491424bc 100644
--- a/fanficfare/adapters/adapter_wraithbaitcom.py
+++ b/fanficfare/adapters/adapter_wraithbaitcom.py
@@ -25,7 +25,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
diff --git a/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py b/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
index 1ff0101e..6ca24192 100644
--- a/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
+++ b/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
@@ -27,7 +27,7 @@ from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return WritingWhimsicalwanderingsNetAdapter
diff --git a/fanficfare/adapters/adapter_wuxiaworldco.py b/fanficfare/adapters/adapter_wuxiaworldco.py
index 3f19b5aa..b5d69105 100644
--- a/fanficfare/adapters/adapter_wuxiaworldco.py
+++ b/fanficfare/adapters/adapter_wuxiaworldco.py
@@ -22,7 +22,7 @@ import re
import urllib2
import urlparse
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from fanficfare.htmlcleanup import stripHTML
from .. import exceptions as exceptions
diff --git a/fanficfare/adapters/adapter_wuxiaworldcom.py b/fanficfare/adapters/adapter_wuxiaworldcom.py
index 6b5afebf..83a11ba9 100644
--- a/fanficfare/adapters/adapter_wuxiaworldcom.py
+++ b/fanficfare/adapters/adapter_wuxiaworldcom.py
@@ -23,7 +23,7 @@ import re
import urllib2
import urlparse
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
diff --git a/fanficfare/adapters/adapter_www13hoursorg.py b/fanficfare/adapters/adapter_www13hoursorg.py
index 25a98ec8..7e5cff09 100644
--- a/fanficfare/adapters/adapter_www13hoursorg.py
+++ b/fanficfare/adapters/adapter_www13hoursorg.py
@@ -19,7 +19,7 @@ from __future__ import absolute_import
from ..htmlcleanup import stripHTML
# Software: eFiction
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class WWW13HoursOrgSiteAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_wwwaneroticstorycom.py b/fanficfare/adapters/adapter_wwwaneroticstorycom.py
index f399c530..9743106b 100644
--- a/fanficfare/adapters/adapter_wwwaneroticstorycom.py
+++ b/fanficfare/adapters/adapter_wwwaneroticstorycom.py
@@ -25,7 +25,7 @@ import urlparse
from bs4.element import Comment
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
diff --git a/fanficfare/adapters/adapter_wwwarea52hkhnet.py b/fanficfare/adapters/adapter_wwwarea52hkhnet.py
index 86d669e5..b0133952 100644
--- a/fanficfare/adapters/adapter_wwwarea52hkhnet.py
+++ b/fanficfare/adapters/adapter_wwwarea52hkhnet.py
@@ -33,7 +33,7 @@ import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return WWWArea52HKHNetAdapter
diff --git a/fanficfare/adapters/adapter_wwwgiantessworldnet.py b/fanficfare/adapters/adapter_wwwgiantessworldnet.py
index 6ebe972b..9945e205 100644
--- a/fanficfare/adapters/adapter_wwwgiantessworldnet.py
+++ b/fanficfare/adapters/adapter_wwwgiantessworldnet.py
@@ -19,7 +19,7 @@
###########################################################################
# Software: eFiction
from __future__ import absolute_import
-from base_efiction_adapter import BaseEfictionAdapter
+from .base_efiction_adapter import BaseEfictionAdapter
class WWWGiantessworldNetAdapter(BaseEfictionAdapter):
diff --git a/fanficfare/adapters/adapter_wwwlushstoriescom.py b/fanficfare/adapters/adapter_wwwlushstoriescom.py
index 1ed17ea6..038b7e8f 100644
--- a/fanficfare/adapters/adapter_wwwlushstoriescom.py
+++ b/fanficfare/adapters/adapter_wwwlushstoriescom.py
@@ -30,7 +30,7 @@ from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
from bs4 import Comment, BeautifulSoup
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
####################################################################################################
def getClass():
diff --git a/fanficfare/adapters/adapter_wwwnovelallcom.py b/fanficfare/adapters/adapter_wwwnovelallcom.py
index 055d49f6..00e58f8d 100644
--- a/fanficfare/adapters/adapter_wwwnovelallcom.py
+++ b/fanficfare/adapters/adapter_wwwnovelallcom.py
@@ -26,7 +26,7 @@ import json
import urllib2
import urlparse
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
from bs4 import Comment
from ..htmlcleanup import removeEntities, stripHTML, fix_excess_space
diff --git a/fanficfare/adapters/adapter_wwwutopiastoriescom.py b/fanficfare/adapters/adapter_wwwutopiastoriescom.py
index a13bdf8d..8226e970 100644
--- a/fanficfare/adapters/adapter_wwwutopiastoriescom.py
+++ b/fanficfare/adapters/adapter_wwwutopiastoriescom.py
@@ -39,7 +39,7 @@ from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
-from base_adapter import BaseSiteAdapter, makeDate
+from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
return WWWUtopiastoriesComAdapter
From b3ce28bc99cb366eab7e42434cbc75b4def2a590 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Tue, 31 Jul 2018 15:09:23 -0500
Subject: [PATCH 027/120] Ignore .bak files.
---
.gitignore | 1 +
1 file changed, 1 insertion(+)
diff --git a/.gitignore b/.gitignore
index 16929968..93f22dcf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,6 +15,7 @@
# usually perl -pi.back -e edits.
*.back
+*.bak
cleanup.sh
FanFictionDownLoader.zip
From bfd1f8907eaec2aa4f8d23b1128940e15ced7dae Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Tue, 31 Jul 2018 17:54:54 -0500
Subject: [PATCH 028/120] py2/py3 transition code
---
fanficfare/adapters/__init__.py | 297 +++++++++---------
.../adapters/adapter_adastrafanficcom.py | 11 +-
.../adapters/adapter_adultfanfictionorg.py | 13 +-
.../adapters/adapter_alternatehistorycom.py | 4 +-
.../adapters/adapter_andromedawebcom.py | 13 +-
.../adapters/adapter_archiveofourownorg.py | 9 +-
.../adapters/adapter_archiveskyehawkecom.py | 11 +-
fanficfare/adapters/adapter_artemisfowlcom.py | 13 +-
fanficfare/adapters/adapter_asexstoriescom.py | 11 +-
.../adapter_ashwindersycophanthexcom.py | 11 +-
.../adapters/adapter_asianfanficscom.py | 8 +-
.../adapters/adapter_asr3slashzoneorg.py | 13 +-
fanficfare/adapters/adapter_bdsmlibrarycom.py | 14 +-
.../adapters/adapter_bloodshedversecom.py | 9 +-
.../adapters/adapter_bloodtiesfancom.py | 12 +-
fanficfare/adapters/adapter_buffygilescom.py | 13 +-
.../adapters/adapter_chaossycophanthexcom.py | 11 +-
.../adapters/adapter_chosentwofanficcom.py | 9 +-
.../adapters/adapter_csiforensicscom.py | 11 +-
fanficfare/adapters/adapter_darksolaceorg.py | 2 +-
fanficfare/adapters/adapter_deandamagecom.py | 2 +-
.../adapters/adapter_deepinmysoulnet.py | 13 +-
.../adapters/adapter_destinysgatewaycom.py | 13 +-
fanficfare/adapters/adapter_dokugacom.py | 11 +-
.../adapters/adapter_dracoandginnycom.py | 13 +-
fanficfare/adapters/adapter_dramioneorg.py | 10 +-
.../adapters/adapter_efictionestelielde.py | 11 +-
fanficfare/adapters/adapter_efpfanficnet.py | 11 +-
.../adapter_erosnsapphosycophanthexcom.py | 13 +-
.../adapters/adapter_fanficauthorsnet.py | 9 +-
.../adapters/adapter_fanficcastletvnet.py | 13 +-
fanficfare/adapters/adapter_fanfichu.py | 10 +-
.../adapters/adapter_fanfictionjunkiesde.py | 11 +-
fanficfare/adapters/adapter_fanfiktionde.py | 11 +-
fanficfare/adapters/adapter_fannation.py | 2 +-
fanficfare/adapters/adapter_fhsarchivecom.py | 2 +-
fanficfare/adapters/adapter_ficbooknet.py | 9 +-
.../adapters/adapter_fictionalleyorg.py | 11 +-
fanficfare/adapters/adapter_fictionhuntcom.py | 10 +-
fanficfare/adapters/adapter_fictionmaniatv.py | 8 +-
fanficfare/adapters/adapter_fictionpadcom.py | 9 +-
.../adapters/adapter_fictionpresscom.py | 9 +-
fanficfare/adapters/adapter_ficwadcom.py | 12 +-
fanficfare/adapters/adapter_fimfictionnet.py | 11 +-
fanficfare/adapters/adapter_finestoriescom.py | 8 +-
fanficfare/adapters/adapter_fireflyfansnet.py | 8 +-
.../adapters/adapter_fireflypopulliorg.py | 9 +-
.../adapter_forumquestionablequestingcom.py | 2 +-
.../adapters/adapter_gluttonyfictioncom.py | 4 +-
.../adapters/adapter_gravitytalescom.py | 9 +-
.../adapter_harrypotterfanfictioncom.py | 11 +-
fanficfare/adapters/adapter_hlfictionnet.py | 11 +-
.../adapters/adapter_hpfanficarchivecom.py | 10 +-
fanficfare/adapters/adapter_iketernalnet.py | 12 +-
fanficfare/adapters/adapter_imagineeficcom.py | 13 +-
.../adapters/adapter_imrightbehindyoucom.py | 2 +-
fanficfare/adapters/adapter_inkbunnynet.py | 9 +-
.../adapters/adapter_itcouldhappennet.py | 2 +-
.../adapter_kiarepositorymujajinet.py | 13 +-
fanficfare/adapters/adapter_ksarchivecom.py | 13 +-
fanficfare/adapters/adapter_lcfanficcom.py | 9 +-
.../adapters/adapter_libraryofmoriacom.py | 2 +-
.../adapters/adapter_lightnovelgatecom.py | 8 +-
fanficfare/adapters/adapter_literotica.py | 13 +-
fanficfare/adapters/adapter_looselugscom.py | 4 +-
.../adapters/adapter_lotrfanfictioncom.py | 2 +-
fanficfare/adapters/adapter_lotrgficcom.py | 10 +-
.../adapters/adapter_lumossycophanthexcom.py | 11 +-
fanficfare/adapters/adapter_masseffect2in.py | 12 +-
fanficfare/adapters/adapter_mcstoriescom.py | 11 +-
fanficfare/adapters/adapter_mediaminerorg.py | 10 +-
.../adapters/adapter_merlinficdtwinscouk.py | 13 +-
.../adapters/adapter_midnightwhispers.py | 13 +-
fanficfare/adapters/adapter_mttjustoncenet.py | 2 +-
fanficfare/adapters/adapter_naiceanilmenet.py | 2 +-
fanficfare/adapters/adapter_narutoficorg.py | 2 +-
.../adapters/adapter_nationallibrarynet.py | 11 +-
fanficfare/adapters/adapter_ncisficcom.py | 11 +-
fanficfare/adapters/adapter_ncisfictioncom.py | 2 +-
.../adapters/adapter_nfacommunitycom.py | 13 +-
.../adapters/adapter_nhamagicalworldsus.py | 2 +-
.../adapters/adapter_ninelivesarchivecom.py | 2 +-
.../adapters/adapter_nocturnallightnet.py | 10 +-
fanficfare/adapters/adapter_noveltrovecom.py | 9 +-
.../adapter_occlumencysycophanthexcom.py | 11 +-
fanficfare/adapters/adapter_phoenixsongnet.py | 10 +-
.../adapters/adapter_ponyfictionarchivenet.py | 13 +-
.../adapters/adapter_potionsandsnitches.py | 11 +-
fanficfare/adapters/adapter_potterficscom.py | 11 +-
.../adapter_potterheadsanonymouscom.py | 13 +-
.../adapters/adapter_pretendercentrecom.py | 13 +-
fanficfare/adapters/adapter_qafficcom.py | 13 +-
fanficfare/adapters/adapter_quotevcom.py | 9 +-
fanficfare/adapters/adapter_royalroadl.py | 11 +-
fanficfare/adapters/adapter_samandjacknet.py | 13 +-
.../adapters/adapter_scarvesandcoffeenet.py | 13 +-
fanficfare/adapters/adapter_sebklainenet.py | 2 +-
.../adapters/adapter_sheppardweircom.py | 11 +-
.../adapters/adapter_shriftweborgbfa.py | 9 +-
.../adapter_sinfuldreamscomunicornfic.py | 2 +-
.../adapter_sinfuldreamscomwhisperedmuse.py | 2 +-
...adapter_sinfuldreamscomwickedtemptation.py | 2 +-
fanficfare/adapters/adapter_siyecouk.py | 11 +-
fanficfare/adapters/adapter_spikeluvercom.py | 9 +-
fanficfare/adapters/adapter_squidgeorgpeja.py | 11 +-
.../adapter_starskyhutcharchivenet.py | 2 +-
.../adapters/adapter_starslibrarynet.py | 2 +-
.../adapters/adapter_storiesofardacom.py | 11 +-
.../adapters/adapter_storiesonlinenet.py | 14 +-
fanficfare/adapters/adapter_sugarquillnet.py | 11 +-
.../adapter_swordborderlineangelcom.py | 2 +-
.../adapter_tasteofpoisoninkubationnet.py | 2 +-
.../adapters/adapter_tenhawkpresentscom.py | 11 +-
fanficfare/adapters/adapter_test1.py | 2 +-
fanficfare/adapters/adapter_tgstorytimecom.py | 2 +-
.../adapters/adapter_thebrokenworldorg.py | 2 +-
.../adapters/adapter_thedelphicexpansecom.py | 2 +-
.../adapters/adapter_thehookupzonenet.py | 2 +-
.../adapters/adapter_themaplebookshelf.py | 2 +-
fanficfare/adapters/adapter_themasquenet.py | 13 +-
.../adapters/adapter_thepetulantpoetesscom.py | 11 +-
.../adapters/adapter_thundercatsfansorg.py | 2 +-
.../adapters/adapter_tolkienfanfiction.py | 17 +-
.../adapters/adapter_tomparisdormcom.py | 9 +-
.../adapters/adapter_trekfanfictionnet.py | 8 +-
fanficfare/adapters/adapter_trekiverseorg.py | 13 +-
fanficfare/adapters/adapter_tthfanficorg.py | 13 +-
.../adapters/adapter_twilightarchivescom.py | 11 +-
fanficfare/adapters/adapter_twilightednet.py | 11 +-
.../adapters/adapter_unknowableroomorg.py | 9 +-
.../adapters/adapter_valentchambercom.py | 4 +-
.../adapters/adapter_voracity2eficcom.py | 9 +-
.../adapters/adapter_walkingtheplankorg.py | 11 +-
fanficfare/adapters/adapter_wattpadcom.py | 2 +-
fanficfare/adapters/adapter_webnovelcom.py | 6 +-
fanficfare/adapters/adapter_whoficcom.py | 11 +-
.../adapters/adapter_wolverineandroguecom.py | 11 +-
fanficfare/adapters/adapter_wraithbaitcom.py | 11 +-
.../adapter_writingwhimsicalwanderingsnet.py | 9 +-
fanficfare/adapters/adapter_wuxiaworldco.py | 8 +-
fanficfare/adapters/adapter_wuxiaworldcom.py | 8 +-
fanficfare/adapters/adapter_www13hoursorg.py | 2 +-
.../adapters/adapter_wwwaneroticstorycom.py | 12 +-
.../adapters/adapter_wwwarea52hkhnet.py | 10 +-
.../adapters/adapter_wwwgiantessworldnet.py | 2 +-
.../adapters/adapter_wwwlushstoriescom.py | 16 +-
fanficfare/adapters/adapter_wwwnovelallcom.py | 8 +-
.../adapters/adapter_wwwutopiastoriescom.py | 13 +-
fanficfare/adapters/base_adapter.py | 2 +-
.../adapters/base_xenforoforum_adapter.py | 6 +-
fanficfare/dateutils.py | 2 +-
fanficfare/geturls.py | 4 +-
fanficfare/writers/__init__.py | 2 +-
153 files changed, 904 insertions(+), 698 deletions(-)
diff --git a/fanficfare/adapters/__init__.py b/fanficfare/adapters/__init__.py
index 2ef031d7..f4b36bbb 100644
--- a/fanficfare/adapters/__init__.py
+++ b/fanficfare/adapters/__init__.py
@@ -19,6 +19,9 @@ from __future__ import absolute_import
import os, re, sys, glob, types
from os.path import dirname, basename, normpath
import logging
+
+# py2 vs py3 transition
+from ..six import text_type as unicode
from ..six.moves.urllib.parse import urlparse
logger = logging.getLogger(__name__)
@@ -31,154 +34,154 @@ from .. import configurable as configurable
from . import base_efiction_adapter
from . import adapter_test1
from . import adapter_fanfictionnet
-# from . import adapter_fanficcastletvnet
-# from . import adapter_fictionalleyorg
-# from . import adapter_fictionpresscom
-# from . import adapter_ficwadcom
-# from . import adapter_fimfictionnet
-# from . import adapter_mediaminerorg
-# from . import adapter_potionsandsnitches
-# from . import adapter_tenhawkpresentscom
-# from . import adapter_adastrafanficcom
-# from . import adapter_tthfanficorg
-# from . import adapter_twilightednet
-# from . import adapter_whoficcom
-# from . import adapter_siyecouk
-# from . import adapter_archiveofourownorg
-# from . import adapter_ficbooknet
-# from . import adapter_nfacommunitycom
-# from . import adapter_midnightwhispers
-# from . import adapter_ksarchivecom
-# from . import adapter_archiveskyehawkecom
-# from . import adapter_squidgeorgpeja
-# from . import adapter_libraryofmoriacom
-# from . import adapter_wraithbaitcom
-# from . import adapter_dramioneorg
-# from . import adapter_ashwindersycophanthexcom
-# from . import adapter_chaossycophanthexcom
-# from . import adapter_erosnsapphosycophanthexcom
-# from . import adapter_lumossycophanthexcom
-# from . import adapter_occlumencysycophanthexcom
-# from . import adapter_phoenixsongnet
-# from . import adapter_walkingtheplankorg
-# from . import adapter_dokugacom
-# from . import adapter_iketernalnet
-# from . import adapter_storiesofardacom
-# from . import adapter_destinysgatewaycom
-# from . import adapter_ncisfictioncom
-# from . import adapter_fanfiktionde
-# from . import adapter_ponyfictionarchivenet
-# from . import adapter_ncisficcom
-# from . import adapter_nationallibrarynet
-# from . import adapter_themasquenet
-# from . import adapter_pretendercentrecom
-# from . import adapter_darksolaceorg
-# from . import adapter_finestoriescom
-# from . import adapter_hpfanficarchivecom
-# from . import adapter_twilightarchivescom
-# from . import adapter_nhamagicalworldsus
-# from . import adapter_hlfictionnet
-# from . import adapter_dracoandginnycom
-# from . import adapter_scarvesandcoffeenet
-# from . import adapter_thepetulantpoetesscom
-# from . import adapter_wolverineandroguecom
-# from . import adapter_merlinficdtwinscouk
-# from . import adapter_thehookupzonenet
-# from . import adapter_bloodtiesfancom
-# from . import adapter_qafficcom
-# from . import adapter_efpfanficnet
-# from . import adapter_potterficscom
-# from . import adapter_efictionestelielde
-# from . import adapter_imagineeficcom
-# from . import adapter_asr3slashzoneorg
-# from . import adapter_potterheadsanonymouscom
-# from . import adapter_fictionpadcom
-# from . import adapter_storiesonlinenet
-# from . import adapter_trekiverseorg
-# from . import adapter_literotica
-# from . import adapter_voracity2eficcom
-# from . import adapter_spikeluvercom
-# from . import adapter_bloodshedversecom
-# from . import adapter_nocturnallightnet
-# from . import adapter_fanfichu
-# from . import adapter_fictionmaniatv
-# from . import adapter_tolkienfanfiction
-# from . import adapter_themaplebookshelf
-# from . import adapter_fannation
-# from . import adapter_sheppardweircom
-# from . import adapter_samandjacknet
-# from . import adapter_csiforensicscom
-# from . import adapter_lotrfanfictioncom
-# from . import adapter_fhsarchivecom
-# from . import adapter_fanfictionjunkiesde
-# from . import adapter_tgstorytimecom
-# from . import adapter_itcouldhappennet
+from . import adapter_fanficcastletvnet
+from . import adapter_fictionalleyorg
+from . import adapter_fictionpresscom
+from . import adapter_ficwadcom
+from . import adapter_fimfictionnet
+from . import adapter_mediaminerorg
+from . import adapter_potionsandsnitches
+from . import adapter_tenhawkpresentscom
+from . import adapter_adastrafanficcom
+from . import adapter_tthfanficorg
+from . import adapter_twilightednet
+from . import adapter_whoficcom
+from . import adapter_siyecouk
+from . import adapter_archiveofourownorg
+from . import adapter_ficbooknet
+from . import adapter_nfacommunitycom
+from . import adapter_midnightwhispers
+from . import adapter_ksarchivecom
+from . import adapter_archiveskyehawkecom
+from . import adapter_squidgeorgpeja
+from . import adapter_libraryofmoriacom
+from . import adapter_wraithbaitcom
+from . import adapter_dramioneorg
+from . import adapter_ashwindersycophanthexcom
+from . import adapter_chaossycophanthexcom
+from . import adapter_erosnsapphosycophanthexcom
+from . import adapter_lumossycophanthexcom
+from . import adapter_occlumencysycophanthexcom
+from . import adapter_phoenixsongnet
+from . import adapter_walkingtheplankorg
+from . import adapter_dokugacom
+from . import adapter_iketernalnet
+from . import adapter_storiesofardacom
+from . import adapter_destinysgatewaycom
+from . import adapter_ncisfictioncom
+from . import adapter_fanfiktionde
+from . import adapter_ponyfictionarchivenet
+from . import adapter_ncisficcom
+from . import adapter_nationallibrarynet
+from . import adapter_themasquenet
+from . import adapter_pretendercentrecom
+from . import adapter_darksolaceorg
+from . import adapter_finestoriescom
+from . import adapter_hpfanficarchivecom
+from . import adapter_twilightarchivescom
+from . import adapter_nhamagicalworldsus
+from . import adapter_hlfictionnet
+from . import adapter_dracoandginnycom
+from . import adapter_scarvesandcoffeenet
+from . import adapter_thepetulantpoetesscom
+from . import adapter_wolverineandroguecom
+from . import adapter_merlinficdtwinscouk
+from . import adapter_thehookupzonenet
+from . import adapter_bloodtiesfancom
+from . import adapter_qafficcom
+from . import adapter_efpfanficnet
+from . import adapter_potterficscom
+from . import adapter_efictionestelielde
+from . import adapter_imagineeficcom
+from . import adapter_asr3slashzoneorg
+from . import adapter_potterheadsanonymouscom
+from . import adapter_fictionpadcom
+from . import adapter_storiesonlinenet
+from . import adapter_trekiverseorg
+from . import adapter_literotica
+from . import adapter_voracity2eficcom
+from . import adapter_spikeluvercom
+from . import adapter_bloodshedversecom
+from . import adapter_nocturnallightnet
+from . import adapter_fanfichu
+from . import adapter_fictionmaniatv
+from . import adapter_tolkienfanfiction
+from . import adapter_themaplebookshelf
+from . import adapter_fannation
+from . import adapter_sheppardweircom
+from . import adapter_samandjacknet
+from . import adapter_csiforensicscom
+from . import adapter_lotrfanfictioncom
+from . import adapter_fhsarchivecom
+from . import adapter_fanfictionjunkiesde
+from . import adapter_tgstorytimecom
+from . import adapter_itcouldhappennet
from . import adapter_forumsspacebattlescom
-# from . import adapter_forumssufficientvelocitycom
-# from . import adapter_forumquestionablequestingcom
-# from . import adapter_ninelivesarchivecom
-# from . import adapter_masseffect2in
-# from . import adapter_quotevcom
-# from . import adapter_mcstoriescom
-# from . import adapter_buffygilescom
-# from . import adapter_andromedawebcom
-# from . import adapter_artemisfowlcom
-# from . import adapter_naiceanilmenet
-# from . import adapter_deepinmysoulnet
-# from . import adapter_kiarepositorymujajinet
-# from . import adapter_adultfanfictionorg
-# from . import adapter_fictionhuntcom
-# from . import adapter_royalroadl
-# from . import adapter_chosentwofanficcom
-# from . import adapter_bdsmlibrarycom
-# from . import adapter_asexstoriescom
-# from . import adapter_gluttonyfictioncom
-# from . import adapter_valentchambercom
-# from . import adapter_looselugscom
-# from . import adapter_wwwgiantessworldnet
-# from . import adapter_lotrgficcom
-# from . import adapter_tomparisdormcom
-# from . import adapter_writingwhimsicalwanderingsnet
-# from . import adapter_sugarquillnet
-# from . import adapter_wwwarea52hkhnet
-# from . import adapter_starslibrarynet
-# from . import adapter_fanficauthorsnet
-# from . import adapter_fireflyfansnet
-# from . import adapter_fireflypopulliorg
-# from . import adapter_sebklainenet
-# from . import adapter_shriftweborgbfa
-# from . import adapter_trekfanfictionnet
-# from . import adapter_wuxiaworldcom
-# from . import adapter_wwwlushstoriescom
-# from . import adapter_wwwutopiastoriescom
-# from . import adapter_sinfuldreamscomunicornfic
-# from . import adapter_sinfuldreamscomwhisperedmuse
-# from . import adapter_sinfuldreamscomwickedtemptation
-# from . import adapter_asianfanficscom
-# from . import adapter_webnovelcom
-# from . import adapter_deandamagecom
-# from . import adapter_imrightbehindyoucom
-# from . import adapter_mttjustoncenet
-# from . import adapter_narutoficorg
-# from . import adapter_starskyhutcharchivenet
-# from . import adapter_swordborderlineangelcom
-# from . import adapter_tasteofpoisoninkubationnet
-# from . import adapter_thebrokenworldorg
-# from . import adapter_thedelphicexpansecom
-# from . import adapter_thundercatsfansorg
-# from . import adapter_unknowableroomorg
-# from . import adapter_www13hoursorg
-# from . import adapter_wwwaneroticstorycom
-# from . import adapter_gravitytalescom
-# from . import adapter_lcfanficcom
-# from . import adapter_noveltrovecom
-# from . import adapter_inkbunnynet
-# from . import adapter_alternatehistorycom
-# from . import adapter_wattpadcom
-# from . import adapter_lightnovelgatecom
-# from . import adapter_wwwnovelallcom
-# from . import adapter_wuxiaworldco
-# from . import adapter_harrypotterfanfictioncom
+from . import adapter_forumssufficientvelocitycom
+from . import adapter_forumquestionablequestingcom
+from . import adapter_ninelivesarchivecom
+from . import adapter_masseffect2in
+from . import adapter_quotevcom
+from . import adapter_mcstoriescom
+from . import adapter_buffygilescom
+from . import adapter_andromedawebcom
+from . import adapter_artemisfowlcom
+from . import adapter_naiceanilmenet
+from . import adapter_deepinmysoulnet
+from . import adapter_kiarepositorymujajinet
+from . import adapter_adultfanfictionorg
+from . import adapter_fictionhuntcom
+from . import adapter_royalroadl
+from . import adapter_chosentwofanficcom
+from . import adapter_bdsmlibrarycom
+from . import adapter_asexstoriescom
+from . import adapter_gluttonyfictioncom
+from . import adapter_valentchambercom
+from . import adapter_looselugscom
+from . import adapter_wwwgiantessworldnet
+from . import adapter_lotrgficcom
+from . import adapter_tomparisdormcom
+from . import adapter_writingwhimsicalwanderingsnet
+from . import adapter_sugarquillnet
+from . import adapter_wwwarea52hkhnet
+from . import adapter_starslibrarynet
+from . import adapter_fanficauthorsnet
+from . import adapter_fireflyfansnet
+from . import adapter_fireflypopulliorg
+from . import adapter_sebklainenet
+from . import adapter_shriftweborgbfa
+from . import adapter_trekfanfictionnet
+from . import adapter_wuxiaworldcom
+from . import adapter_wwwlushstoriescom
+from . import adapter_wwwutopiastoriescom
+from . import adapter_sinfuldreamscomunicornfic
+from . import adapter_sinfuldreamscomwhisperedmuse
+from . import adapter_sinfuldreamscomwickedtemptation
+from . import adapter_asianfanficscom
+from . import adapter_webnovelcom
+from . import adapter_deandamagecom
+from . import adapter_imrightbehindyoucom
+from . import adapter_mttjustoncenet
+from . import adapter_narutoficorg
+from . import adapter_starskyhutcharchivenet
+from . import adapter_swordborderlineangelcom
+from . import adapter_tasteofpoisoninkubationnet
+from . import adapter_thebrokenworldorg
+from . import adapter_thedelphicexpansecom
+from . import adapter_thundercatsfansorg
+from . import adapter_unknowableroomorg
+from . import adapter_www13hoursorg
+from . import adapter_wwwaneroticstorycom
+from . import adapter_gravitytalescom
+from . import adapter_lcfanficcom
+from . import adapter_noveltrovecom
+from . import adapter_inkbunnynet
+from . import adapter_alternatehistorycom
+from . import adapter_wattpadcom
+from . import adapter_lightnovelgatecom
+from . import adapter_wwwnovelallcom
+from . import adapter_wuxiaworldco
+from . import adapter_harrypotterfanfictioncom
## This bit of complexity allows adapters to be added by just adding
## importing. It eliminates the long if/else clauses we used to need
diff --git a/fanficfare/adapters/adapter_adastrafanficcom.py b/fanficfare/adapters/adapter_adastrafanficcom.py
index f8da90e4..895ad39e 100644
--- a/fanficfare/adapters/adapter_adastrafanficcom.py
+++ b/fanficfare/adapters/adapter_adastrafanficcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,15 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib
-import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six import string_types as basestring
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class AdAstraFanficComSiteAdapter(BaseSiteAdapter):
@@ -73,7 +76,7 @@ class AdAstraFanficComSiteAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_adultfanfictionorg.py b/fanficfare/adapters/adapter_adultfanfictionorg.py
index 2776ffe9..80789adf 100644
--- a/fanficfare/adapters/adapter_adultfanfictionorg.py
+++ b/fanficfare/adapters/adapter_adultfanfictionorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# -- coding: utf-8 --
-# Copyright 2013 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -23,12 +23,15 @@ import logging
logger = logging.getLogger(__name__)
import re
import sys
-import urllib2
from bs4 import UnicodeDammit
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
################################################################################
@@ -199,7 +202,7 @@ class AdultFanFictionOrgAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist("Code: 404. {0}".format(url))
elif e.code == 410:
@@ -265,7 +268,7 @@ class AdultFanFictionOrgAdapter(BaseSiteAdapter):
logger.debug('Getting the author page: {0}'.format(author_Url))
try:
adata = self._fetchUrl(author_Url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code in 404:
raise exceptions.StoryDoesNotExist("Author Page: Code: 404. {0}".format(author_Url))
elif e.code == 410:
@@ -303,7 +306,7 @@ class AdultFanFictionOrgAdapter(BaseSiteAdapter):
logger.debug('Getting the author page: {0}'.format(author_Url))
try:
adata = self._fetchUrl(author_Url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code in 404:
raise exceptions.StoryDoesNotExist("Author Page: Code: 404. {0}".format(author_Url))
elif e.code == 410:
diff --git a/fanficfare/adapters/adapter_alternatehistorycom.py b/fanficfare/adapters/adapter_alternatehistorycom.py
index f7824377..c3d13649 100644
--- a/fanficfare/adapters/adapter_alternatehistorycom.py
+++ b/fanficfare/adapters/adapter_alternatehistorycom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2017 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,7 +16,7 @@
#
from __future__ import absolute_import
-from adapter_forumquestionablequestingcom import QuestionablequestingComAdapter
+from .adapter_forumquestionablequestingcom import QuestionablequestingComAdapter
def getClass():
return WWWAlternatehistoryComAdapter
diff --git a/fanficfare/adapters/adapter_andromedawebcom.py b/fanficfare/adapters/adapter_andromedawebcom.py
index 4a35e11e..c0f6e223 100644
--- a/fanficfare/adapters/adapter_andromedawebcom.py
+++ b/fanficfare/adapters/adapter_andromedawebcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2017 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -22,12 +22,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -125,7 +126,7 @@ class AndromedaWebComAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -159,7 +160,7 @@ class AndromedaWebComAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_archiveofourownorg.py b/fanficfare/adapters/adapter_archiveofourownorg.py
index 489a824e..4e8cbfa2 100644
--- a/fanficfare/adapters/adapter_archiveofourownorg.py
+++ b/fanficfare/adapters/adapter_archiveofourownorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2014 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2014 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,15 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
import json
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -155,7 +158,7 @@ class ArchiveOfOurOwnOrgAdapter(BaseSiteAdapter):
if "This work could have adult content. If you proceed you have agreed that you are willing to see such content." in meta:
raise exceptions.AdultCheckRequired(self.url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_archiveskyehawkecom.py b/fanficfare/adapters/adapter_archiveskyehawkecom.py
index b2307635..459a0416 100644
--- a/fanficfare/adapters/adapter_archiveskyehawkecom.py
+++ b/fanficfare/adapters/adapter_archiveskyehawkecom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
@@ -80,7 +81,7 @@ class ArchiveSkyeHawkeComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_artemisfowlcom.py b/fanficfare/adapters/adapter_artemisfowlcom.py
index e446cb2e..a6dd1e5a 100644
--- a/fanficfare/adapters/adapter_artemisfowlcom.py
+++ b/fanficfare/adapters/adapter_artemisfowlcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2017 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -22,12 +22,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -125,7 +126,7 @@ class ArtemisFowlComAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -159,7 +160,7 @@ class ArtemisFowlComAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_asexstoriescom.py b/fanficfare/adapters/adapter_asexstoriescom.py
index 3b1d0490..b470c311 100644
--- a/fanficfare/adapters/adapter_asexstoriescom.py
+++ b/fanficfare/adapters/adapter_asexstoriescom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,8 +19,6 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-import urlparse
import os
from bs4.element import Comment
@@ -28,6 +26,11 @@ from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
import sys
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -83,7 +86,7 @@ class ASexStoriesComAdapter(BaseSiteAdapter):
soup1 = self.make_soup(data1)
#strip comments from soup
[comment.extract() for comment in soup1.find_all(text=lambda text:isinstance(text, Comment))]
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_ashwindersycophanthexcom.py b/fanficfare/adapters/adapter_ashwindersycophanthexcom.py
index 7cb8ff93..699171bf 100644
--- a/fanficfare/adapters/adapter_ashwindersycophanthexcom.py
+++ b/fanficfare/adapters/adapter_ashwindersycophanthexcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -115,7 +116,7 @@ class AshwinderSycophantHexComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_asianfanficscom.py b/fanficfare/adapters/adapter_asianfanficscom.py
index a14818b3..2d65d2df 100644
--- a/fanficfare/adapters/adapter_asianfanficscom.py
+++ b/fanficfare/adapters/adapter_asianfanficscom.py
@@ -4,11 +4,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -125,7 +127,7 @@ class AsianFanFicsComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_asr3slashzoneorg.py b/fanficfare/adapters/adapter_asr3slashzoneorg.py
index 7c678559..91515376 100644
--- a/fanficfare/adapters/adapter_asr3slashzoneorg.py
+++ b/fanficfare/adapters/adapter_asr3slashzoneorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -85,7 +86,7 @@ class Asr3SlashzoneOrgAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -105,7 +106,7 @@ class Asr3SlashzoneOrgAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_bdsmlibrarycom.py b/fanficfare/adapters/adapter_bdsmlibrarycom.py
index 305c6a51..824c821a 100644
--- a/fanficfare/adapters/adapter_bdsmlibrarycom.py
+++ b/fanficfare/adapters/adapter_bdsmlibrarycom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -51,14 +51,16 @@ import logging
logger = logging.getLogger(__name__)
import re
import urllib
-import urllib2
import sys
-import urlparse
-
from bs4 import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -111,7 +113,7 @@ class BDSMLibraryComSiteAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(self.url)
soup = self.make_soup(data)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -133,7 +135,7 @@ class BDSMLibraryComSiteAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(self.url)
soup = self.make_soup(data)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_bloodshedversecom.py b/fanficfare/adapters/adapter_bloodshedversecom.py
index 4c3cb87a..ace8eca2 100644
--- a/fanficfare/adapters/adapter_bloodshedversecom.py
+++ b/fanficfare/adapters/adapter_bloodshedversecom.py
@@ -1,8 +1,6 @@
from __future__ import absolute_import
from datetime import timedelta
import re
-import urllib2
-import urlparse
import logging
logger = logging.getLogger(__name__)
@@ -10,6 +8,11 @@ logger = logging.getLogger(__name__)
from bs4 import BeautifulSoup
from ..htmlcleanup import stripHTML
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions
@@ -48,7 +51,7 @@ class BloodshedverseComAdapter(BaseSiteAdapter):
if exception:
try:
data = self._fetchUrl(url, parameters)
- except urllib2.HTTPError:
+ except HTTPError:
raise exception(self.url)
# Just let self._fetchUrl throw the exception, don't catch and
# customize it.
diff --git a/fanficfare/adapters/adapter_bloodtiesfancom.py b/fanficfare/adapters/adapter_bloodtiesfancom.py
index c8f35a42..a6bc2c3c 100644
--- a/fanficfare/adapters/adapter_bloodtiesfancom.py
+++ b/fanficfare/adapters/adapter_bloodtiesfancom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,14 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
from bs4.element import Tag
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
# By virtue of being recent and requiring both is_adult and user/pass,
@@ -150,7 +152,7 @@ class BloodTiesFansComAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -184,7 +186,7 @@ class BloodTiesFansComAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_buffygilescom.py b/fanficfare/adapters/adapter_buffygilescom.py
index f1eb696b..af50231c 100644
--- a/fanficfare/adapters/adapter_buffygilescom.py
+++ b/fanficfare/adapters/adapter_buffygilescom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2017 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -123,7 +124,7 @@ class BuffyGilesComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -157,7 +158,7 @@ class BuffyGilesComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_chaossycophanthexcom.py b/fanficfare/adapters/adapter_chaossycophanthexcom.py
index 448ae1a8..bee50b0f 100644
--- a/fanficfare/adapters/adapter_chaossycophanthexcom.py
+++ b/fanficfare/adapters/adapter_chaossycophanthexcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -88,7 +89,7 @@ class ChaosSycophantHexComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_chosentwofanficcom.py b/fanficfare/adapters/adapter_chosentwofanficcom.py
index f7e0b984..ac9bdd5c 100644
--- a/fanficfare/adapters/adapter_chosentwofanficcom.py
+++ b/fanficfare/adapters/adapter_chosentwofanficcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,13 +20,16 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
import sys
from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -85,7 +88,7 @@ class ChosenTwoFanFicArchiveAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_csiforensicscom.py b/fanficfare/adapters/adapter_csiforensicscom.py
index 2358ed8b..bab51511 100644
--- a/fanficfare/adapters/adapter_csiforensicscom.py
+++ b/fanficfare/adapters/adapter_csiforensicscom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
@@ -87,7 +88,7 @@ class CSIForensicsComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_darksolaceorg.py b/fanficfare/adapters/adapter_darksolaceorg.py
index 173b3548..5e49a78c 100644
--- a/fanficfare/adapters/adapter_darksolaceorg.py
+++ b/fanficfare/adapters/adapter_darksolaceorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_deandamagecom.py b/fanficfare/adapters/adapter_deandamagecom.py
index 9f0c1446..a10cfeb3 100644
--- a/fanficfare/adapters/adapter_deandamagecom.py
+++ b/fanficfare/adapters/adapter_deandamagecom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_deepinmysoulnet.py b/fanficfare/adapters/adapter_deepinmysoulnet.py
index b690ae94..678b3ee1 100644
--- a/fanficfare/adapters/adapter_deepinmysoulnet.py
+++ b/fanficfare/adapters/adapter_deepinmysoulnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -123,7 +124,7 @@ class DeepInMySoulNetAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -157,7 +158,7 @@ class DeepInMySoulNetAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_destinysgatewaycom.py b/fanficfare/adapters/adapter_destinysgatewaycom.py
index db5f4cec..5b9c63df 100644
--- a/fanficfare/adapters/adapter_destinysgatewaycom.py
+++ b/fanficfare/adapters/adapter_destinysgatewaycom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -88,7 +89,7 @@ class DestinysGatewayComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -108,7 +109,7 @@ class DestinysGatewayComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_dokugacom.py b/fanficfare/adapters/adapter_dokugacom.py
index dc19fe5c..f98eaa16 100644
--- a/fanficfare/adapters/adapter_dokugacom.py
+++ b/fanficfare/adapters/adapter_dokugacom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -126,7 +127,7 @@ class DokugaComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_dracoandginnycom.py b/fanficfare/adapters/adapter_dracoandginnycom.py
index 2876faf8..b37cd90c 100644
--- a/fanficfare/adapters/adapter_dracoandginnycom.py
+++ b/fanficfare/adapters/adapter_dracoandginnycom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -122,7 +123,7 @@ class DracoAndGinnyComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -147,7 +148,7 @@ class DracoAndGinnyComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_dramioneorg.py b/fanficfare/adapters/adapter_dramioneorg.py
index c626115f..ce7a0ae4 100644
--- a/fanficfare/adapters/adapter_dramioneorg.py
+++ b/fanficfare/adapters/adapter_dramioneorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,14 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
from bs4.element import Tag
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -122,7 +124,7 @@ class DramioneOrgAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_efictionestelielde.py b/fanficfare/adapters/adapter_efictionestelielde.py
index 66a0fce9..81bacbde 100644
--- a/fanficfare/adapters/adapter_efictionestelielde.py
+++ b/fanficfare/adapters/adapter_efictionestelielde.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -79,7 +80,7 @@ class EfictionEstelielDeAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_efpfanficnet.py b/fanficfare/adapters/adapter_efpfanficnet.py
index 962f3d7f..a16d01e2 100644
--- a/fanficfare/adapters/adapter_efpfanficnet.py
+++ b/fanficfare/adapters/adapter_efpfanficnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -110,7 +111,7 @@ class EFPFanFicNet(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py b/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py
index cd40a9fb..ad341981 100644
--- a/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py
+++ b/fanficfare/adapters/adapter_erosnsapphosycophanthexcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -88,7 +89,7 @@ class ErosnSapphoSycophantHexComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -108,7 +109,7 @@ class ErosnSapphoSycophantHexComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_fanficauthorsnet.py b/fanficfare/adapters/adapter_fanficauthorsnet.py
index 1bfe586c..83f2341c 100644
--- a/fanficfare/adapters/adapter_fanficauthorsnet.py
+++ b/fanficfare/adapters/adapter_fanficauthorsnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# -- coding: utf-8 --
-# Copyright 2013 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -24,12 +24,15 @@ import logging
logger = logging.getLogger(__name__)
import re
import sys
-import urllib2
from bs4 import UnicodeDammit, Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
####################################################################################################
@@ -157,7 +160,7 @@ class FanficAuthorsNetAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url+'index/', params, usecache=False)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist("Code: 404. {0}".format(url))
elif e.code == 410:
diff --git a/fanficfare/adapters/adapter_fanficcastletvnet.py b/fanficfare/adapters/adapter_fanficcastletvnet.py
index 61d8f0a2..dce86a4a 100644
--- a/fanficfare/adapters/adapter_fanficcastletvnet.py
+++ b/fanficfare/adapters/adapter_fanficcastletvnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2014 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2014 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
# In general an 'adapter' needs to do these five things:
@@ -138,7 +139,7 @@ class FanficCastleTVNetAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -163,7 +164,7 @@ class FanficCastleTVNetAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_fanfichu.py b/fanficfare/adapters/adapter_fanfichu.py
index 3e59d121..293077a3 100644
--- a/fanficfare/adapters/adapter_fanfichu.py
+++ b/fanficfare/adapters/adapter_fanfichu.py
@@ -1,6 +1,6 @@
# coding=utf-8
-# Copyright 2014 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2014 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -17,8 +17,10 @@
from __future__ import absolute_import
import re
-import urllib2
-import urlparse
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions
@@ -62,7 +64,7 @@ class FanficHuAdapter(BaseSiteAdapter):
if exception:
try:
data = self._fetchUrl(url, parameters)
- except urllib2.HTTPError:
+ except HTTPError:
raise exception(self.url)
# Just let self._fetchUrl throw the exception, don't catch and
# customize it.
diff --git a/fanficfare/adapters/adapter_fanfictionjunkiesde.py b/fanficfare/adapters/adapter_fanfictionjunkiesde.py
index 523ab771..d8c95dfc 100644
--- a/fanficfare/adapters/adapter_fanfictionjunkiesde.py
+++ b/fanficfare/adapters/adapter_fanfictionjunkiesde.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
# By virtue of being recent and requiring both is_adult and user/pass,
@@ -147,7 +148,7 @@ class FanfictionJunkiesDeAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_fanfiktionde.py b/fanficfare/adapters/adapter_fanfiktionde.py
index 632185ba..1f47f842 100644
--- a/fanficfare/adapters/adapter_fanfiktionde.py
+++ b/fanficfare/adapters/adapter_fanfiktionde.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -21,13 +21,14 @@ import logging
logger = logging.getLogger(__name__)
import re
import urllib
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -119,7 +120,7 @@ class FanFiktionDeAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_fannation.py b/fanficfare/adapters/adapter_fannation.py
index 6a74ac0f..d34beaf8 100644
--- a/fanficfare/adapters/adapter_fannation.py
+++ b/fanficfare/adapters/adapter_fannation.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2014 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2014 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_fhsarchivecom.py b/fanficfare/adapters/adapter_fhsarchivecom.py
index a9b0c809..1eed35e8 100644
--- a/fanficfare/adapters/adapter_fhsarchivecom.py
+++ b/fanficfare/adapters/adapter_fhsarchivecom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2014 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2014 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_ficbooknet.py b/fanficfare/adapters/adapter_ficbooknet.py
index e4000552..afac38e7 100644
--- a/fanficfare/adapters/adapter_ficbooknet.py
+++ b/fanficfare/adapters/adapter_ficbooknet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,13 +20,16 @@ import datetime
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
from .. import translit
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
@@ -77,7 +80,7 @@ class FicBookNetAdapter(BaseSiteAdapter):
logger.debug("URL: "+url)
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_fictionalleyorg.py b/fanficfare/adapters/adapter_fictionalleyorg.py
index ba6f32d4..d7bee1df 100644
--- a/fanficfare/adapters/adapter_fictionalleyorg.py
+++ b/fanficfare/adapters/adapter_fictionalleyorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ import logging
logger = logging.getLogger(__name__)
import re
import urllib
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class FictionAlleyOrgSiteAdapter(BaseSiteAdapter):
@@ -80,7 +81,7 @@ class FictionAlleyOrgSiteAdapter(BaseSiteAdapter):
try:
data = self._postFetchWithIAmOld(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_fictionhuntcom.py b/fanficfare/adapters/adapter_fictionhuntcom.py
index 98dcbe03..de1d203c 100644
--- a/fanficfare/adapters/adapter_fictionhuntcom.py
+++ b/fanficfare/adapters/adapter_fictionhuntcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2016 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,11 +19,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class FictionHuntComSiteAdapter(BaseSiteAdapter):
@@ -69,7 +71,7 @@ class FictionHuntComSiteAdapter(BaseSiteAdapter):
url = self.url
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.meta)
else:
diff --git a/fanficfare/adapters/adapter_fictionmaniatv.py b/fanficfare/adapters/adapter_fictionmaniatv.py
index a79e7685..b3d64df9 100644
--- a/fanficfare/adapters/adapter_fictionmaniatv.py
+++ b/fanficfare/adapters/adapter_fictionmaniatv.py
@@ -1,7 +1,9 @@
from __future__ import absolute_import
import re
-import urllib2
-import urlparse
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
@@ -45,7 +47,7 @@ class FictionManiaTVAdapter(BaseSiteAdapter):
if exception:
try:
data = self._fetchUrl(url, parameters)
- except urllib2.HTTPError:
+ except HTTPError:
raise exception(self.url)
# Just let self._fetchUrl throw the exception, don't catch and
# customize it.
diff --git a/fanficfare/adapters/adapter_fictionpadcom.py b/fanficfare/adapters/adapter_fictionpadcom.py
index a0066082..9d8dc4e4 100644
--- a/fanficfare/adapters/adapter_fictionpadcom.py
+++ b/fanficfare/adapters/adapter_fictionpadcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,13 +19,16 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
import json
#from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class FictionPadSiteAdapter(BaseSiteAdapter):
@@ -122,7 +125,7 @@ class FictionPadSiteAdapter(BaseSiteAdapter):
data = data[:data.rindex(";")]
data = data.replace('tables:','"tables":')
tables = json.loads(data)['tables']
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(url)
else:
diff --git a/fanficfare/adapters/adapter_fictionpresscom.py b/fanficfare/adapters/adapter_fictionpresscom.py
index 8d5cef68..132e291f 100644
--- a/fanficfare/adapters/adapter_fictionpresscom.py
+++ b/fanficfare/adapters/adapter_fictionpresscom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,9 +19,12 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
+
+# py2 vs py3 transition
+from ..six import text_type as unicode
+
## They're from the same people and pretty much identical.
-from adapter_fanfictionnet import FanFictionNetSiteAdapter
+from .adapter_fanfictionnet import FanFictionNetSiteAdapter
class FictionPressComSiteAdapter(FanFictionNetSiteAdapter):
diff --git a/fanficfare/adapters/adapter_ficwadcom.py b/fanficfare/adapters/adapter_ficwadcom.py
index 725584da..aad2d0ea 100644
--- a/fanficfare/adapters/adapter_ficwadcom.py
+++ b/fanficfare/adapters/adapter_ficwadcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,14 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-import httplib, urllib
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class FicwadComSiteAdapter(BaseSiteAdapter):
@@ -95,7 +97,7 @@ class FicwadComSiteAdapter(BaseSiteAdapter):
if "
Featured Story
" in data:
raise exceptions.StoryDoesNotExist(self.url)
soup = self.make_soup(data)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -117,7 +119,7 @@ class FicwadComSiteAdapter(BaseSiteAdapter):
self._setURL(url)
try:
soup = self.make_soup(self._fetchUrl(url))
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_fimfictionnet.py b/fanficfare/adapters/adapter_fimfictionnet.py
index 88c4000a..444e6b68 100644
--- a/fanficfare/adapters/adapter_fimfictionnet.py
+++ b/fanficfare/adapters/adapter_fimfictionnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -21,13 +21,16 @@ from datetime import date, datetime
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-import cookielib as cl
import json
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+from ..six.moves import http_cookiejar as cl
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -98,7 +101,7 @@ class FimFictionNetSiteAdapter(BaseSiteAdapter):
data = self.do_fix_blockquotes(self._fetchUrl(self.url,
usecache=(not self.is_adult)))
soup = self.make_soup(data)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_finestoriescom.py b/fanficfare/adapters/adapter_finestoriescom.py
index 2c66b756..81be7f5a 100644
--- a/fanficfare/adapters/adapter_finestoriescom.py
+++ b/fanficfare/adapters/adapter_finestoriescom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -18,7 +18,11 @@
from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
-from adapter_storiesonlinenet import StoriesOnlineNetAdapter
+
+# py2 vs py3 transition
+from ..six import text_type as unicode
+
+from .adapter_storiesonlinenet import StoriesOnlineNetAdapter
def getClass():
return FineStoriesComAdapter
diff --git a/fanficfare/adapters/adapter_fireflyfansnet.py b/fanficfare/adapters/adapter_fireflyfansnet.py
index 27ae6b44..989e5b4b 100644
--- a/fanficfare/adapters/adapter_fireflyfansnet.py
+++ b/fanficfare/adapters/adapter_fireflyfansnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -22,7 +22,9 @@ from __future__ import absolute_import
import logging
import re
import sys
-import urllib2
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
@@ -79,7 +81,7 @@ class FireFlyFansNetSiteAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_fireflypopulliorg.py b/fanficfare/adapters/adapter_fireflypopulliorg.py
index 48b45cc8..50f6cf40 100644
--- a/fanficfare/adapters/adapter_fireflypopulliorg.py
+++ b/fanficfare/adapters/adapter_fireflypopulliorg.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2017 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -25,9 +25,12 @@ from __future__ import absolute_import
''' This adapter scrapes the metadata and chapter text from stories on firefly.populli.org '''
import logging
import re
-import urllib2
import sys
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
@@ -98,7 +101,7 @@ class FireflyPopulliOrgSiteAdapter(BaseSiteAdapter):
'''
try:
page_data = self._fetchUrl(page)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist('404 error: {}'.format(page))
else:
diff --git a/fanficfare/adapters/adapter_forumquestionablequestingcom.py b/fanficfare/adapters/adapter_forumquestionablequestingcom.py
index d67b2627..59c6d534 100644
--- a/fanficfare/adapters/adapter_forumquestionablequestingcom.py
+++ b/fanficfare/adapters/adapter_forumquestionablequestingcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2017 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_gluttonyfictioncom.py b/fanficfare/adapters/adapter_gluttonyfictioncom.py
index b9ee9207..adfc6439 100644
--- a/fanficfare/adapters/adapter_gluttonyfictioncom.py
+++ b/fanficfare/adapters/adapter_gluttonyfictioncom.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
-# Copyright 2015 FanFicFare team
-# Copyright 2016 FanFicFare team
+# Copyright 2018 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_gravitytalescom.py b/fanficfare/adapters/adapter_gravitytalescom.py
index c2551217..7ca0b57a 100644
--- a/fanficfare/adapters/adapter_gravitytalescom.py
+++ b/fanficfare/adapters/adapter_gravitytalescom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2014 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2014 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -21,7 +21,6 @@
from __future__ import absolute_import
import logging
import re
-import urllib2
from datetime import datetime
logger = logging.getLogger(__name__)
@@ -37,6 +36,10 @@ except ImportError:
# logger.warn('No version of feedparser module available, falling back to naive published and updated date')
feedparser = None
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter
from .. import exceptions as exceptions
@@ -93,7 +96,7 @@ class GravityTalesComSiteAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist('Error 404: {0}'.format(self.url))
else:
diff --git a/fanficfare/adapters/adapter_harrypotterfanfictioncom.py b/fanficfare/adapters/adapter_harrypotterfanfictioncom.py
index d95133a1..8a709e38 100644
--- a/fanficfare/adapters/adapter_harrypotterfanfictioncom.py
+++ b/fanficfare/adapters/adapter_harrypotterfanfictioncom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class HarryPotterFanFictionComSiteAdapter(BaseSiteAdapter):
@@ -70,7 +71,7 @@ class HarryPotterFanFictionComSiteAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_hlfictionnet.py b/fanficfare/adapters/adapter_hlfictionnet.py
index c1bc6338..0292fb8a 100644
--- a/fanficfare/adapters/adapter_hlfictionnet.py
+++ b/fanficfare/adapters/adapter_hlfictionnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -78,7 +79,7 @@ class HLFictionNetAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_hpfanficarchivecom.py b/fanficfare/adapters/adapter_hpfanficarchivecom.py
index 7820405e..e0e597e2 100644
--- a/fanficfare/adapters/adapter_hpfanficarchivecom.py
+++ b/fanficfare/adapters/adapter_hpfanficarchivecom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,14 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -78,7 +80,7 @@ class HPFanficArchiveComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_iketernalnet.py b/fanficfare/adapters/adapter_iketernalnet.py
index 7a2027c1..68000aeb 100644
--- a/fanficfare/adapters/adapter_iketernalnet.py
+++ b/fanficfare/adapters/adapter_iketernalnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,11 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -121,7 +123,7 @@ class IkEternalNetAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -159,7 +161,7 @@ class IkEternalNetAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_imagineeficcom.py b/fanficfare/adapters/adapter_imagineeficcom.py
index e1ba1be2..c80b895d 100644
--- a/fanficfare/adapters/adapter_imagineeficcom.py
+++ b/fanficfare/adapters/adapter_imagineeficcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -122,7 +123,7 @@ class ImagineEFicComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -147,7 +148,7 @@ class ImagineEFicComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_imrightbehindyoucom.py b/fanficfare/adapters/adapter_imrightbehindyoucom.py
index 48fbd3bc..ba1c47db 100644
--- a/fanficfare/adapters/adapter_imrightbehindyoucom.py
+++ b/fanficfare/adapters/adapter_imrightbehindyoucom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_inkbunnynet.py b/fanficfare/adapters/adapter_inkbunnynet.py
index 6c4d718f..1b6c2c5b 100644
--- a/fanficfare/adapters/adapter_inkbunnynet.py
+++ b/fanficfare/adapters/adapter_inkbunnynet.py
@@ -22,14 +22,17 @@ from __future__ import absolute_import
import logging
import re
import sys
-import urllib2
from datetime import datetime, timedelta
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
-UNIX_EPOCHE = datetime.fromtimestamp(0)
+UNIX_EPOCHE = datetime.fromtimestamp(0)
logger = logging.getLogger(__name__)
@@ -123,7 +126,7 @@ class InkBunnyNetSiteAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist('Error 404: {0}'.format(self.url))
else:
diff --git a/fanficfare/adapters/adapter_itcouldhappennet.py b/fanficfare/adapters/adapter_itcouldhappennet.py
index 75d6171a..1bd03f15 100644
--- a/fanficfare/adapters/adapter_itcouldhappennet.py
+++ b/fanficfare/adapters/adapter_itcouldhappennet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2015 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_kiarepositorymujajinet.py b/fanficfare/adapters/adapter_kiarepositorymujajinet.py
index ec4bfbd1..8a8ba785 100644
--- a/fanficfare/adapters/adapter_kiarepositorymujajinet.py
+++ b/fanficfare/adapters/adapter_kiarepositorymujajinet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -123,7 +124,7 @@ class KiaRepositoryMujajiNetAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -157,7 +158,7 @@ class KiaRepositoryMujajiNetAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_ksarchivecom.py b/fanficfare/adapters/adapter_ksarchivecom.py
index 9837e9c6..299fa9f7 100644
--- a/fanficfare/adapters/adapter_ksarchivecom.py
+++ b/fanficfare/adapters/adapter_ksarchivecom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
# Search for XXX comments--that's where things are most likely to need changing.
@@ -101,7 +102,7 @@ class KSArchiveComAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -134,7 +135,7 @@ class KSArchiveComAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_lcfanficcom.py b/fanficfare/adapters/adapter_lcfanficcom.py
index d0088687..6faadd8b 100644
--- a/fanficfare/adapters/adapter_lcfanficcom.py
+++ b/fanficfare/adapters/adapter_lcfanficcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2014 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2014 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -23,9 +23,12 @@ import logging
import json
import re
import sys # ## used for debug purposes
-import urllib2
import datetime
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
@@ -96,7 +99,7 @@ class LCFanFicComSiteAdapter(BaseSiteAdapter):
url = self.url
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist('Error 404: {0}'.format(self.url))
else:
diff --git a/fanficfare/adapters/adapter_libraryofmoriacom.py b/fanficfare/adapters/adapter_libraryofmoriacom.py
index 88d7a847..adefe3b5 100644
--- a/fanficfare/adapters/adapter_libraryofmoriacom.py
+++ b/fanficfare/adapters/adapter_libraryofmoriacom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_lightnovelgatecom.py b/fanficfare/adapters/adapter_lightnovelgatecom.py
index e33d5f74..9c2fe519 100644
--- a/fanficfare/adapters/adapter_lightnovelgatecom.py
+++ b/fanficfare/adapters/adapter_lightnovelgatecom.py
@@ -22,8 +22,10 @@
from __future__ import absolute_import
import logging
import re
-import urllib2
-import urlparse
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
@@ -96,7 +98,7 @@ class LightNovelGateSiteAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist('404 error: {}'.format(url))
else:
diff --git a/fanficfare/adapters/adapter_literotica.py b/fanficfare/adapters/adapter_literotica.py
index 0f9b9dd1..0422ae75 100644
--- a/fanficfare/adapters/adapter_literotica.py
+++ b/fanficfare/adapters/adapter_literotica.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,13 +19,16 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-import urlparse
from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class LiteroticaSiteAdapter(BaseSiteAdapter):
@@ -132,7 +135,7 @@ class LiteroticaSiteAdapter(BaseSiteAdapter):
soup1 = self.make_soup(data1)
#strip comments from soup
[comment.extract() for comment in soup1.findAll(text=lambda text:isinstance(text, Comment))]
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code in [404, 410]:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -157,7 +160,7 @@ class LiteroticaSiteAdapter(BaseSiteAdapter):
#strip comments from soup
[comment.extract() for comment in soupAuth.findAll(text=lambda text:isinstance(text, Comment))]
# logger.debug(soupAuth)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code in [404, 410]:
raise exceptions.StoryDoesNotExist(authorurl)
else:
diff --git a/fanficfare/adapters/adapter_looselugscom.py b/fanficfare/adapters/adapter_looselugscom.py
index 72ef3709..9d72db0c 100644
--- a/fanficfare/adapters/adapter_looselugscom.py
+++ b/fanficfare/adapters/adapter_looselugscom.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
-# Copyright 2015 FanFicFare team
-# Copyright 2016 FanFicFare team
+# Copyright 2018 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_lotrfanfictioncom.py b/fanficfare/adapters/adapter_lotrfanfictioncom.py
index 6be94d75..a57e7d83 100644
--- a/fanficfare/adapters/adapter_lotrfanfictioncom.py
+++ b/fanficfare/adapters/adapter_lotrfanfictioncom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2014 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2014 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_lotrgficcom.py b/fanficfare/adapters/adapter_lotrgficcom.py
index 362824bb..ffda5d6b 100644
--- a/fanficfare/adapters/adapter_lotrgficcom.py
+++ b/fanficfare/adapters/adapter_lotrgficcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -23,11 +23,13 @@ import logging
logger = logging.getLogger(__name__)
import re
import urllib
-import urllib2
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class LOTRgficComAdapter(BaseSiteAdapter):
@@ -79,7 +81,7 @@ class LOTRgficComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_lumossycophanthexcom.py b/fanficfare/adapters/adapter_lumossycophanthexcom.py
index f8c59acc..6b8f1c84 100644
--- a/fanficfare/adapters/adapter_lumossycophanthexcom.py
+++ b/fanficfare/adapters/adapter_lumossycophanthexcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -88,7 +89,7 @@ class LumosSycophantHexComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_masseffect2in.py b/fanficfare/adapters/adapter_masseffect2in.py
index 1facf33e..3de5f4e7 100644
--- a/fanficfare/adapters/adapter_masseffect2in.py
+++ b/fanficfare/adapters/adapter_masseffect2in.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2017 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,10 +20,12 @@ import bs4
import datetime
import logging
import re
-import urllib2
-
from ..htmlcleanup import removeEntities, stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
@@ -115,7 +117,7 @@ class MassEffect2InAdapter(BaseSiteAdapter):
try:
startingChapter = self._makeChapter(self.url)
- except urllib2.HTTPError, error:
+ except HTTPError as error:
if error.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
raise
@@ -199,7 +201,7 @@ class MassEffect2InAdapter(BaseSiteAdapter):
chapterTitle = re.sub(garbagePattern, u'', chapter.getHeading()[chapterTitleStart:])
self.add_chapter(chapterTitle, url)
- except ParsingError, error:
+ except ParsingError as error:
raise exceptions.FailedToDownload(u"Failed to download chapter `%s': %s" % (url, error))
# Some metadata are handled separately due to format conversions.
diff --git a/fanficfare/adapters/adapter_mcstoriescom.py b/fanficfare/adapters/adapter_mcstoriescom.py
index 289758cc..954d15bb 100644
--- a/fanficfare/adapters/adapter_mcstoriescom.py
+++ b/fanficfare/adapters/adapter_mcstoriescom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,14 +19,17 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-import urlparse
import os
from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class MCStoriesComSiteAdapter(BaseSiteAdapter):
@@ -85,7 +88,7 @@ class MCStoriesComSiteAdapter(BaseSiteAdapter):
soup1 = self.make_soup(data1)
#strip comments from soup
[comment.extract() for comment in soup1.find_all(text=lambda text:isinstance(text, Comment))]
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_mediaminerorg.py b/fanficfare/adapters/adapter_mediaminerorg.py
index eeee32c4..c23c1326 100644
--- a/fanficfare/adapters/adapter_mediaminerorg.py
+++ b/fanficfare/adapters/adapter_mediaminerorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,11 +20,13 @@ import logging
logger = logging.getLogger(__name__)
import re
import urllib
-import urllib2
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class MediaMinerOrgSiteAdapter(BaseSiteAdapter):
@@ -108,7 +110,7 @@ class MediaMinerOrgSiteAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url) # w/o trailing / gets 'chapter list' page even for one-shots.
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
logger.error("404 on %s"%url)
raise exceptions.StoryDoesNotExist(self.url)
diff --git a/fanficfare/adapters/adapter_merlinficdtwinscouk.py b/fanficfare/adapters/adapter_merlinficdtwinscouk.py
index 7345eeae..2385ae0a 100644
--- a/fanficfare/adapters/adapter_merlinficdtwinscouk.py
+++ b/fanficfare/adapters/adapter_merlinficdtwinscouk.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -122,7 +123,7 @@ class MerlinFicDtwinsCoUk(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -147,7 +148,7 @@ class MerlinFicDtwinsCoUk(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_midnightwhispers.py b/fanficfare/adapters/adapter_midnightwhispers.py
index 7f818744..7c885752 100644
--- a/fanficfare/adapters/adapter_midnightwhispers.py
+++ b/fanficfare/adapters/adapter_midnightwhispers.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
# Search for XXX comments--that's where things are most likely to need changing.
@@ -106,7 +107,7 @@ class MidnightwhispersAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -139,7 +140,7 @@ class MidnightwhispersAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_mttjustoncenet.py b/fanficfare/adapters/adapter_mttjustoncenet.py
index ace5dd19..5349b40b 100644
--- a/fanficfare/adapters/adapter_mttjustoncenet.py
+++ b/fanficfare/adapters/adapter_mttjustoncenet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_naiceanilmenet.py b/fanficfare/adapters/adapter_naiceanilmenet.py
index d6ccc27e..758ef07f 100644
--- a/fanficfare/adapters/adapter_naiceanilmenet.py
+++ b/fanficfare/adapters/adapter_naiceanilmenet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2016 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_narutoficorg.py b/fanficfare/adapters/adapter_narutoficorg.py
index e65a7666..2d0bbd92 100644
--- a/fanficfare/adapters/adapter_narutoficorg.py
+++ b/fanficfare/adapters/adapter_narutoficorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_nationallibrarynet.py b/fanficfare/adapters/adapter_nationallibrarynet.py
index 61c77349..b569b0c2 100644
--- a/fanficfare/adapters/adapter_nationallibrarynet.py
+++ b/fanficfare/adapters/adapter_nationallibrarynet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -83,7 +84,7 @@ class NationalLibraryNetAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_ncisficcom.py b/fanficfare/adapters/adapter_ncisficcom.py
index 7f378448..bcab3f51 100644
--- a/fanficfare/adapters/adapter_ncisficcom.py
+++ b/fanficfare/adapters/adapter_ncisficcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -82,7 +83,7 @@ class NCISFicComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_ncisfictioncom.py b/fanficfare/adapters/adapter_ncisfictioncom.py
index 03746e5b..aa90d1c3 100644
--- a/fanficfare/adapters/adapter_ncisfictioncom.py
+++ b/fanficfare/adapters/adapter_ncisfictioncom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_nfacommunitycom.py b/fanficfare/adapters/adapter_nfacommunitycom.py
index f860c6a0..e7da293b 100644
--- a/fanficfare/adapters/adapter_nfacommunitycom.py
+++ b/fanficfare/adapters/adapter_nfacommunitycom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
# Search for XXX comments--that's where things are most likely to need changing.
@@ -101,7 +102,7 @@ class NfaCommunityComAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -134,7 +135,7 @@ class NfaCommunityComAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_nhamagicalworldsus.py b/fanficfare/adapters/adapter_nhamagicalworldsus.py
index 9b74d3e1..6328e04c 100644
--- a/fanficfare/adapters/adapter_nhamagicalworldsus.py
+++ b/fanficfare/adapters/adapter_nhamagicalworldsus.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_ninelivesarchivecom.py b/fanficfare/adapters/adapter_ninelivesarchivecom.py
index e0d6eacc..72907fe8 100644
--- a/fanficfare/adapters/adapter_ninelivesarchivecom.py
+++ b/fanficfare/adapters/adapter_ninelivesarchivecom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2015 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2015 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_nocturnallightnet.py b/fanficfare/adapters/adapter_nocturnallightnet.py
index 65612154..a0a63161 100644
--- a/fanficfare/adapters/adapter_nocturnallightnet.py
+++ b/fanficfare/adapters/adapter_nocturnallightnet.py
@@ -2,11 +2,13 @@
from __future__ import absolute_import
import re
-import urllib2
-import urlparse
-
from bs4.element import Tag
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions
@@ -48,7 +50,7 @@ class NocturnalLightNetAdapter(BaseSiteAdapter):
if exception:
try:
data = self._fetchUrl(url, parameters)
- except urllib2.HTTPError:
+ except HTTPError:
raise exception(self.url)
# Just let self._fetchUrl throw the exception, don't catch and
# customize it.
diff --git a/fanficfare/adapters/adapter_noveltrovecom.py b/fanficfare/adapters/adapter_noveltrovecom.py
index 5948674e..0dd88c38 100644
--- a/fanficfare/adapters/adapter_noveltrovecom.py
+++ b/fanficfare/adapters/adapter_noveltrovecom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2014 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2014 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -23,9 +23,12 @@ import logging
import json
import re
import sys # ## used for debug purposes
-import urllib2
import datetime
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
@@ -88,7 +91,7 @@ class NovelTroveComSiteAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist('Error 404: {0}'.format(self.url))
else:
diff --git a/fanficfare/adapters/adapter_occlumencysycophanthexcom.py b/fanficfare/adapters/adapter_occlumencysycophanthexcom.py
index 7e321405..14ff543e 100644
--- a/fanficfare/adapters/adapter_occlumencysycophanthexcom.py
+++ b/fanficfare/adapters/adapter_occlumencysycophanthexcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -114,7 +115,7 @@ class OcclumencySycophantHexComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_phoenixsongnet.py b/fanficfare/adapters/adapter_phoenixsongnet.py
index d84c1146..22c37153 100644
--- a/fanficfare/adapters/adapter_phoenixsongnet.py
+++ b/fanficfare/adapters/adapter_phoenixsongnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,14 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2, urllib, cookielib
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -111,7 +113,7 @@ class PhoenixSongNetAdapter(BaseSiteAdapter):
if self.getConfig('force_login'):
self.performLogin(url)
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_ponyfictionarchivenet.py b/fanficfare/adapters/adapter_ponyfictionarchivenet.py
index 8a6d5028..f3be33f6 100644
--- a/fanficfare/adapters/adapter_ponyfictionarchivenet.py
+++ b/fanficfare/adapters/adapter_ponyfictionarchivenet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -92,7 +93,7 @@ class PonyFictionArchiveNetAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -113,7 +114,7 @@ class PonyFictionArchiveNetAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_potionsandsnitches.py b/fanficfare/adapters/adapter_potionsandsnitches.py
index 142b2bdf..49fa91f3 100644
--- a/fanficfare/adapters/adapter_potionsandsnitches.py
+++ b/fanficfare/adapters/adapter_potionsandsnitches.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -21,12 +21,13 @@ import logging
logger = logging.getLogger(__name__)
import re
import urllib
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class PotionsAndSnitchesOrgSiteAdapter(BaseSiteAdapter):
@@ -65,7 +66,7 @@ class PotionsAndSnitchesOrgSiteAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_potterficscom.py b/fanficfare/adapters/adapter_potterficscom.py
index c7323227..582e3c83 100644
--- a/fanficfare/adapters/adapter_potterficscom.py
+++ b/fanficfare/adapters/adapter_potterficscom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ import datetime
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter
# This function is called by the downloader in all adapter_*.py files
@@ -145,7 +146,7 @@ class PotterFicsComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_potterheadsanonymouscom.py b/fanficfare/adapters/adapter_potterheadsanonymouscom.py
index 5aad67b5..97f53150 100644
--- a/fanficfare/adapters/adapter_potterheadsanonymouscom.py
+++ b/fanficfare/adapters/adapter_potterheadsanonymouscom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -122,7 +123,7 @@ class PotterHeadsAnonymousComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -156,7 +157,7 @@ class PotterHeadsAnonymousComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_pretendercentrecom.py b/fanficfare/adapters/adapter_pretendercentrecom.py
index c6bc603d..789a0184 100644
--- a/fanficfare/adapters/adapter_pretendercentrecom.py
+++ b/fanficfare/adapters/adapter_pretendercentrecom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -91,7 +92,7 @@ class PretenderCenterComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -111,7 +112,7 @@ class PretenderCenterComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_qafficcom.py b/fanficfare/adapters/adapter_qafficcom.py
index 4bea9eeb..7fa6df5f 100644
--- a/fanficfare/adapters/adapter_qafficcom.py
+++ b/fanficfare/adapters/adapter_qafficcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -88,7 +89,7 @@ class QafFicComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -108,7 +109,7 @@ class QafFicComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_quotevcom.py b/fanficfare/adapters/adapter_quotevcom.py
index 588a5ea5..84ddfdc2 100644
--- a/fanficfare/adapters/adapter_quotevcom.py
+++ b/fanficfare/adapters/adapter_quotevcom.py
@@ -2,11 +2,14 @@
from __future__ import absolute_import
import re
-import urlparse
-import urllib2
import datetime
from .. import exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter
from ..htmlcleanup import stripHTML
@@ -52,7 +55,7 @@ class QuotevComAdapter(BaseSiteAdapter):
def extractChapterUrlsAndMetadata(self):
try:
data = self._fetchUrl(self.url)
- except urllib2.HTTPError as e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist("Code: %s: %s"%(e.code,self.url))
else:
diff --git a/fanficfare/adapters/adapter_royalroadl.py b/fanficfare/adapters/adapter_royalroadl.py
index 44b6cfd9..959f6419 100644
--- a/fanficfare/adapters/adapter_royalroadl.py
+++ b/fanficfare/adapters/adapter_royalroadl.py
@@ -18,14 +18,17 @@
from __future__ import absolute_import
import contextlib
from datetime import datetime
-import httplib
import logging
import re
-import urllib2
-
from .. import exceptions as exceptions
from ..dateutils import parse_relative_date_string
from ..htmlcleanup import stripHTML
+
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves import http_client as httplib
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter
logger = logging.getLogger(__name__)
@@ -143,7 +146,7 @@ class RoyalRoadAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_samandjacknet.py b/fanficfare/adapters/adapter_samandjacknet.py
index 372a7a29..7a6395f6 100644
--- a/fanficfare/adapters/adapter_samandjacknet.py
+++ b/fanficfare/adapters/adapter_samandjacknet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
# By virtue of being recent and requiring both is_adult and user/pass,
@@ -151,7 +152,7 @@ class SamAndJackNetAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -184,7 +185,7 @@ class SamAndJackNetAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_scarvesandcoffeenet.py b/fanficfare/adapters/adapter_scarvesandcoffeenet.py
index 0b195f4c..79677349 100644
--- a/fanficfare/adapters/adapter_scarvesandcoffeenet.py
+++ b/fanficfare/adapters/adapter_scarvesandcoffeenet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -96,7 +97,7 @@ class ScarvesAndCoffeeNetAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -116,7 +117,7 @@ class ScarvesAndCoffeeNetAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_sebklainenet.py b/fanficfare/adapters/adapter_sebklainenet.py
index 05d05ae8..116d5f70 100644
--- a/fanficfare/adapters/adapter_sebklainenet.py
+++ b/fanficfare/adapters/adapter_sebklainenet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_sheppardweircom.py b/fanficfare/adapters/adapter_sheppardweircom.py
index f8eee2c9..1a959410 100644
--- a/fanficfare/adapters/adapter_sheppardweircom.py
+++ b/fanficfare/adapters/adapter_sheppardweircom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
# By virtue of being recent and requiring both is_adult and user/pass,
@@ -146,7 +147,7 @@ class SheppardWeirComAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_shriftweborgbfa.py b/fanficfare/adapters/adapter_shriftweborgbfa.py
index d6fdbe77..261fcd7e 100644
--- a/fanficfare/adapters/adapter_shriftweborgbfa.py
+++ b/fanficfare/adapters/adapter_shriftweborgbfa.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2017 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -25,9 +25,12 @@ from __future__ import absolute_import
''' This adapter scrapes the metadata and chapter text from stories on archive.shriftweb.org '''
import logging
import re
-import urllib2
import sys
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
@@ -98,7 +101,7 @@ class BFAArchiveShriftwebOrgSiteAdapter(BaseSiteAdapter):
'''
try:
page_data = self._fetchUrl(page)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist('404 error: {}'.format(page))
else:
diff --git a/fanficfare/adapters/adapter_sinfuldreamscomunicornfic.py b/fanficfare/adapters/adapter_sinfuldreamscomunicornfic.py
index f69d31e8..e2c56509 100644
--- a/fanficfare/adapters/adapter_sinfuldreamscomunicornfic.py
+++ b/fanficfare/adapters/adapter_sinfuldreamscomunicornfic.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_sinfuldreamscomwhisperedmuse.py b/fanficfare/adapters/adapter_sinfuldreamscomwhisperedmuse.py
index c8d868eb..595c4c66 100644
--- a/fanficfare/adapters/adapter_sinfuldreamscomwhisperedmuse.py
+++ b/fanficfare/adapters/adapter_sinfuldreamscomwhisperedmuse.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_sinfuldreamscomwickedtemptation.py b/fanficfare/adapters/adapter_sinfuldreamscomwickedtemptation.py
index 38ea9e04..44cc733f 100644
--- a/fanficfare/adapters/adapter_sinfuldreamscomwickedtemptation.py
+++ b/fanficfare/adapters/adapter_sinfuldreamscomwickedtemptation.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_siyecouk.py b/fanficfare/adapters/adapter_siyecouk.py
index 34914c05..e08524a6 100644
--- a/fanficfare/adapters/adapter_siyecouk.py
+++ b/fanficfare/adapters/adapter_siyecouk.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
# This function is called by the downloader in all adapter_*.py files
@@ -83,7 +84,7 @@ class SiyeCoUkAdapter(BaseSiteAdapter): # XXX
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_spikeluvercom.py b/fanficfare/adapters/adapter_spikeluvercom.py
index 6150644f..3d1493fb 100644
--- a/fanficfare/adapters/adapter_spikeluvercom.py
+++ b/fanficfare/adapters/adapter_spikeluvercom.py
@@ -1,12 +1,15 @@
# Software: eFiction
from __future__ import absolute_import
import re
-import urllib2
-import urlparse
from bs4.element import Tag
from ..htmlcleanup import stripHTML
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions
@@ -52,7 +55,7 @@ class SpikeluverComAdapter(BaseSiteAdapter):
if exception:
try:
data = self._fetchUrl(url, parameters)
- except urllib2.HTTPError:
+ except HTTPError:
raise exception(self.url)
# Just let self._fetchUrl throw the exception, don't catch and
# customize it.
diff --git a/fanficfare/adapters/adapter_squidgeorgpeja.py b/fanficfare/adapters/adapter_squidgeorgpeja.py
index 8e703f2f..105866ed 100644
--- a/fanficfare/adapters/adapter_squidgeorgpeja.py
+++ b/fanficfare/adapters/adapter_squidgeorgpeja.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
@@ -93,7 +94,7 @@ class SquidgeOrgPejaAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_starskyhutcharchivenet.py b/fanficfare/adapters/adapter_starskyhutcharchivenet.py
index 331978a4..9c4df433 100644
--- a/fanficfare/adapters/adapter_starskyhutcharchivenet.py
+++ b/fanficfare/adapters/adapter_starskyhutcharchivenet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_starslibrarynet.py b/fanficfare/adapters/adapter_starslibrarynet.py
index fa060ae0..a5557eba 100644
--- a/fanficfare/adapters/adapter_starslibrarynet.py
+++ b/fanficfare/adapters/adapter_starslibrarynet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_storiesofardacom.py b/fanficfare/adapters/adapter_storiesofardacom.py
index 8154191b..0a0de89b 100644
--- a/fanficfare/adapters/adapter_storiesofardacom.py
+++ b/fanficfare/adapters/adapter_storiesofardacom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -77,7 +78,7 @@ class StoriesOfArdaComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_storiesonlinenet.py b/fanficfare/adapters/adapter_storiesonlinenet.py
index 4ada3b52..f7b9e744 100644
--- a/fanficfare/adapters/adapter_storiesonlinenet.py
+++ b/fanficfare/adapters/adapter_storiesonlinenet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,14 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
#
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -134,7 +136,7 @@ class StoriesOnlineNetAdapter(BaseSiteAdapter):
self.needToLogin = False
try:
data = self._fetchUrl(url+":i")
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code in (404, 410):
raise exceptions.StoryDoesNotExist("Code: %s: %s"%(e.code,self.url))
elif e.code == 401:
@@ -148,7 +150,7 @@ class StoriesOnlineNetAdapter(BaseSiteAdapter):
self.performLogin(url)
try:
data = self._fetchUrl(url+":i",usecache=False)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code in (404, 410):
raise exceptions.StoryDoesNotExist("Code: %s: %s"%(e.code,self.url))
elif e.code == 401:
@@ -264,7 +266,7 @@ class StoriesOnlineNetAdapter(BaseSiteAdapter):
page = page + 1
try:
data = self._fetchUrl(self.story.getList('authorUrl')[0] + "/" + unicode(page))
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.FailedToDownload("Story not found in Author's list--change Listings Theme back to "+self.getTheme())
asoup = self.make_soup(data)
diff --git a/fanficfare/adapters/adapter_sugarquillnet.py b/fanficfare/adapters/adapter_sugarquillnet.py
index 739f92bf..e64b629e 100644
--- a/fanficfare/adapters/adapter_sugarquillnet.py
+++ b/fanficfare/adapters/adapter_sugarquillnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -30,13 +30,16 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
import sys
from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -89,7 +92,7 @@ class SugarQuillNetAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(url)
else:
@@ -127,7 +130,7 @@ class SugarQuillNetAdapter(BaseSiteAdapter):
logger.debug('Getting the author page: {0}'.format(author_Url))
try:
adata = self._fetchUrl(author_Url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
-                 if e.code in 404:
+                 if e.code == 404:
raise exceptions.StoryDoesNotExist("Author Page: Code: 404. {0}".format(author_Url))
elif e.code == 410:
diff --git a/fanficfare/adapters/adapter_swordborderlineangelcom.py b/fanficfare/adapters/adapter_swordborderlineangelcom.py
index e58274bf..3b2cfd09 100644
--- a/fanficfare/adapters/adapter_swordborderlineangelcom.py
+++ b/fanficfare/adapters/adapter_swordborderlineangelcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2017 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_tasteofpoisoninkubationnet.py b/fanficfare/adapters/adapter_tasteofpoisoninkubationnet.py
index 0a6c717b..3ad19ac4 100644
--- a/fanficfare/adapters/adapter_tasteofpoisoninkubationnet.py
+++ b/fanficfare/adapters/adapter_tasteofpoisoninkubationnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_tenhawkpresentscom.py b/fanficfare/adapters/adapter_tenhawkpresentscom.py
index e9a5da95..c65f23a1 100644
--- a/fanficfare/adapters/adapter_tenhawkpresentscom.py
+++ b/fanficfare/adapters/adapter_tenhawkpresentscom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -21,12 +21,13 @@ import logging
logger = logging.getLogger(__name__)
import re
import urllib
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class TenhawkPresentsComSiteAdapter(BaseSiteAdapter):
@@ -111,7 +112,7 @@ class TenhawkPresentsComSiteAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_test1.py b/fanficfare/adapters/adapter_test1.py
index 74a87dab..7d0f02b4 100644
--- a/fanficfare/adapters/adapter_test1.py
+++ b/fanficfare/adapters/adapter_test1.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_tgstorytimecom.py b/fanficfare/adapters/adapter_tgstorytimecom.py
index ddc950f9..69bcd962 100644
--- a/fanficfare/adapters/adapter_tgstorytimecom.py
+++ b/fanficfare/adapters/adapter_tgstorytimecom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2015 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_thebrokenworldorg.py b/fanficfare/adapters/adapter_thebrokenworldorg.py
index a32c350a..d4133b51 100644
--- a/fanficfare/adapters/adapter_thebrokenworldorg.py
+++ b/fanficfare/adapters/adapter_thebrokenworldorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_thedelphicexpansecom.py b/fanficfare/adapters/adapter_thedelphicexpansecom.py
index 77a7ebc2..c634cbb5 100644
--- a/fanficfare/adapters/adapter_thedelphicexpansecom.py
+++ b/fanficfare/adapters/adapter_thedelphicexpansecom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_thehookupzonenet.py b/fanficfare/adapters/adapter_thehookupzonenet.py
index 30aca527..285ff594 100644
--- a/fanficfare/adapters/adapter_thehookupzonenet.py
+++ b/fanficfare/adapters/adapter_thehookupzonenet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_themaplebookshelf.py b/fanficfare/adapters/adapter_themaplebookshelf.py
index 05ea1f59..e9affbf9 100644
--- a/fanficfare/adapters/adapter_themaplebookshelf.py
+++ b/fanficfare/adapters/adapter_themaplebookshelf.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2014 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2014 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_themasquenet.py b/fanficfare/adapters/adapter_themasquenet.py
index c914b975..ee69babf 100644
--- a/fanficfare/adapters/adapter_themasquenet.py
+++ b/fanficfare/adapters/adapter_themasquenet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -129,7 +130,7 @@ class TheMasqueNetAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -154,7 +155,7 @@ class TheMasqueNetAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_thepetulantpoetesscom.py b/fanficfare/adapters/adapter_thepetulantpoetesscom.py
index 436eccbd..e22b3ecb 100644
--- a/fanficfare/adapters/adapter_thepetulantpoetesscom.py
+++ b/fanficfare/adapters/adapter_thepetulantpoetesscom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -112,7 +113,7 @@ class ThePetulantPoetessComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_thundercatsfansorg.py b/fanficfare/adapters/adapter_thundercatsfansorg.py
index 05beaf1e..534fdddd 100644
--- a/fanficfare/adapters/adapter_thundercatsfansorg.py
+++ b/fanficfare/adapters/adapter_thundercatsfansorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_tolkienfanfiction.py b/fanficfare/adapters/adapter_tolkienfanfiction.py
index 2455b1ad..de09296b 100644
--- a/fanficfare/adapters/adapter_tolkienfanfiction.py
+++ b/fanficfare/adapters/adapter_tolkienfanfiction.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2014 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2014 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -59,15 +59,18 @@ import time
import logging
logger = logging.getLogger(__name__)
import re
-import urllib
-import urllib2
-import urlparse
import string
from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
+from ..six.moves import urllib
+
from .base_adapter import BaseSiteAdapter, makeDate
def _is_story_url(url):
@@ -131,7 +134,7 @@ class TolkienFanfictionAdapter(BaseSiteAdapter):
chapterSoup = self.make_soup(chapterHtml)
indexLink = chapterSoup.find("a", text="[Index]")
self._normalizeURL('http://' + self.getSiteDomain() + '/' + indexLink.get('href'))
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -141,7 +144,7 @@ class TolkienFanfictionAdapter(BaseSiteAdapter):
try:
indexHtml = _fix_broken_markup(self._fetchUrl(self.url))
soup = self.make_soup(indexHtml)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -207,7 +210,7 @@ class TolkienFanfictionAdapter(BaseSiteAdapter):
date = searchSoup.find(text="Updated:").nextSibling.string
logger.debug("Last Updated: '%s'" % date)
self.story.setMetadata('dateUpdated', makeDate(date, self.dateformat))
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_tomparisdormcom.py b/fanficfare/adapters/adapter_tomparisdormcom.py
index fd3a584b..169c757a 100644
--- a/fanficfare/adapters/adapter_tomparisdormcom.py
+++ b/fanficfare/adapters/adapter_tomparisdormcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,13 +20,16 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
import sys
from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -79,7 +82,7 @@ class TomParisDormComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(url)
else:
diff --git a/fanficfare/adapters/adapter_trekfanfictionnet.py b/fanficfare/adapters/adapter_trekfanfictionnet.py
index e92fbc63..1304e4f2 100644
--- a/fanficfare/adapters/adapter_trekfanfictionnet.py
+++ b/fanficfare/adapters/adapter_trekfanfictionnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -28,7 +28,9 @@ This will scrape the chapter text and metadata from stories on the site trekfanf
'''
import logging
import re
-import urllib2
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
@@ -92,7 +94,7 @@ class TrekFanFictionNetSiteAdapter(BaseSiteAdapter):
'''
try:
page_data = self._fetchUrl(page)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist('404 error: {}'.format(page))
else:
diff --git a/fanficfare/adapters/adapter_trekiverseorg.py b/fanficfare/adapters/adapter_trekiverseorg.py
index e13b6b69..7dd98a60 100644
--- a/fanficfare/adapters/adapter_trekiverseorg.py
+++ b/fanficfare/adapters/adapter_trekiverseorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -126,7 +127,7 @@ class TrekiverseOrgAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
@@ -151,7 +152,7 @@ class TrekiverseOrgAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_tthfanficorg.py b/fanficfare/adapters/adapter_tthfanficorg.py
index c7d07f6c..bbed206f 100644
--- a/fanficfare/adapters/adapter_tthfanficorg.py
+++ b/fanficfare/adapters/adapter_tthfanficorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,10 +19,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class TwistingTheHellmouthSiteAdapter(BaseSiteAdapter):
@@ -149,7 +152,7 @@ class TwistingTheHellmouthSiteAdapter(BaseSiteAdapter):
data = self._fetchUrl(url)
#print("data:%s"%data)
soup = self.make_soup(data)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code in (404,410):
raise exceptions.StoryDoesNotExist(url)
else:
@@ -199,7 +202,7 @@ class TwistingTheHellmouthSiteAdapter(BaseSiteAdapter):
#logger.info("authsoup:%s"%authorsoup)
descurl=nextpage
authorsoup = self.make_soup(authordata)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(url)
else:
@@ -236,7 +239,7 @@ class TwistingTheHellmouthSiteAdapter(BaseSiteAdapter):
stripHTML(a),
stripHTML(autha)),'https://'+self.host+a['href'])
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(url)
else:
diff --git a/fanficfare/adapters/adapter_twilightarchivescom.py b/fanficfare/adapters/adapter_twilightarchivescom.py
index 80ff8cb3..fde1cb8c 100644
--- a/fanficfare/adapters/adapter_twilightarchivescom.py
+++ b/fanficfare/adapters/adapter_twilightarchivescom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -78,7 +79,7 @@ class TwilightArchivesComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_twilightednet.py b/fanficfare/adapters/adapter_twilightednet.py
index e0803659..959a239e 100644
--- a/fanficfare/adapters/adapter_twilightednet.py
+++ b/fanficfare/adapters/adapter_twilightednet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -21,12 +21,13 @@ import logging
logger = logging.getLogger(__name__)
import re
import urllib
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class TwilightedNetSiteAdapter(BaseSiteAdapter):
@@ -101,7 +102,7 @@ class TwilightedNetSiteAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_unknowableroomorg.py b/fanficfare/adapters/adapter_unknowableroomorg.py
index 4fb51199..b8ec3f50 100644
--- a/fanficfare/adapters/adapter_unknowableroomorg.py
+++ b/fanficfare/adapters/adapter_unknowableroomorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2017 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -21,9 +21,12 @@ from __future__ import absolute_import
''' This adapter will download stories from the site unknowableroom.org '''
import logging
import re
-import urllib2
import sys
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
@@ -83,7 +86,7 @@ class UnknowableRoomOrgSiteAdapter(BaseSiteAdapter):
'''
try:
page_data = self._fetchUrl(page)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist('404 error: {}'.format(page))
else:
diff --git a/fanficfare/adapters/adapter_valentchambercom.py b/fanficfare/adapters/adapter_valentchambercom.py
index 0d067759..ba35317d 100644
--- a/fanficfare/adapters/adapter_valentchambercom.py
+++ b/fanficfare/adapters/adapter_valentchambercom.py
@@ -1,7 +1,6 @@
 # -*- coding: utf-8 -*-
-# Copyright 2015 FanFicFare team
-# Copyright 2016 FanFicFare team
+# Copyright 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_voracity2eficcom.py b/fanficfare/adapters/adapter_voracity2eficcom.py
index e1108ee9..8a47b2d5 100644
--- a/fanficfare/adapters/adapter_voracity2eficcom.py
+++ b/fanficfare/adapters/adapter_voracity2eficcom.py
@@ -1,11 +1,14 @@
# Software: eFiction
from __future__ import absolute_import
import re
-import urllib2
-import urlparse
from bs4.element import Tag
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions
@@ -81,7 +84,7 @@ class Voracity2EficComAdapter(BaseSiteAdapter):
if exception:
try:
data = self._fetchUrl(url, parameters)
- except urllib2.HTTPError:
+ except HTTPError:
raise exception(self.url)
# Just let self._fetchUrl throw the exception, don't catch and
# customize it.
diff --git a/fanficfare/adapters/adapter_walkingtheplankorg.py b/fanficfare/adapters/adapter_walkingtheplankorg.py
index 930d9920..73e36f82 100644
--- a/fanficfare/adapters/adapter_walkingtheplankorg.py
+++ b/fanficfare/adapters/adapter_walkingtheplankorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -86,7 +87,7 @@ class WalkingThePlankOrgAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_wattpadcom.py b/fanficfare/adapters/adapter_wattpadcom.py
index 5e23a9ac..2e6041e1 100644
--- a/fanficfare/adapters/adapter_wattpadcom.py
+++ b/fanficfare/adapters/adapter_wattpadcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_webnovelcom.py b/fanficfare/adapters/adapter_webnovelcom.py
index 38e61944..39b9744d 100644
--- a/fanficfare/adapters/adapter_webnovelcom.py
+++ b/fanficfare/adapters/adapter_webnovelcom.py
@@ -23,7 +23,9 @@ import json
import logging
import re
import time
-import urllib2
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter
from .. import exceptions as exceptions
@@ -101,7 +103,7 @@ class WWWWebNovelComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist('Error 404: {0}'.format(self.url))
else:
diff --git a/fanficfare/adapters/adapter_whoficcom.py b/fanficfare/adapters/adapter_whoficcom.py
index 517c73ed..983151e5 100644
--- a/fanficfare/adapters/adapter_whoficcom.py
+++ b/fanficfare/adapters/adapter_whoficcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,12 +20,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
class WhoficComSiteAdapter(BaseSiteAdapter):
@@ -60,7 +61,7 @@ class WhoficComSiteAdapter(BaseSiteAdapter):
# use BeautifulSoup HTML parser to make everything easier to find.
try:
soup = self.make_soup(self._fetchUrl(url))
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_wolverineandroguecom.py b/fanficfare/adapters/adapter_wolverineandroguecom.py
index f2733bf5..ca10f248 100644
--- a/fanficfare/adapters/adapter_wolverineandroguecom.py
+++ b/fanficfare/adapters/adapter_wolverineandroguecom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -78,7 +79,7 @@ class WolverineAndRogueComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_wraithbaitcom.py b/fanficfare/adapters/adapter_wraithbaitcom.py
index 491424bc..095602b8 100644
--- a/fanficfare/adapters/adapter_wraithbaitcom.py
+++ b/fanficfare/adapters/adapter_wraithbaitcom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,12 +19,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
@@ -87,7 +88,7 @@ class WraithBaitComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py b/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
index 6ca24192..fe4f8274 100644
--- a/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
+++ b/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,13 +20,16 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
import sys
from bs4.element import Comment
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -84,7 +87,7 @@ class WritingWhimsicalwanderingsNetAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url+addurl)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(url)
else:
diff --git a/fanficfare/adapters/adapter_wuxiaworldco.py b/fanficfare/adapters/adapter_wuxiaworldco.py
index b5d69105..a84f4ea4 100644
--- a/fanficfare/adapters/adapter_wuxiaworldco.py
+++ b/fanficfare/adapters/adapter_wuxiaworldco.py
@@ -19,8 +19,10 @@
from __future__ import absolute_import
import logging
import re
-import urllib2
-import urlparse
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
from fanficfare.htmlcleanup import stripHTML
@@ -68,7 +70,7 @@ class WuxiaWorldCoSiteAdapter(BaseSiteAdapter):
logger.debug('URL: %s', self.url)
try:
data = self._fetchUrl(self.url)
- except urllib2.HTTPError, exception:
+ except HTTPError as exception:
if exception.code == 404:
raise exceptions.StoryDoesNotExist('404 error: {}'.format(self.url))
raise exception
diff --git a/fanficfare/adapters/adapter_wuxiaworldcom.py b/fanficfare/adapters/adapter_wuxiaworldcom.py
index 83a11ba9..9bb3263e 100644
--- a/fanficfare/adapters/adapter_wuxiaworldcom.py
+++ b/fanficfare/adapters/adapter_wuxiaworldcom.py
@@ -20,8 +20,10 @@ from __future__ import absolute_import
import json
import logging
import re
-import urllib2
-import urlparse
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
from ..htmlcleanup import stripHTML
@@ -79,7 +81,7 @@ class WuxiaWorldComSiteAdapter(BaseSiteAdapter):
logger.debug('URL: %s', self.url)
try:
data = self._fetchUrl(self.url)
- except urllib2.HTTPError, exception:
+ except HTTPError as exception:
if exception.code == 404:
raise exceptions.StoryDoesNotExist('404 error: {}'.format(self.url))
raise exception
diff --git a/fanficfare/adapters/adapter_www13hoursorg.py b/fanficfare/adapters/adapter_www13hoursorg.py
index 7e5cff09..4c92cc4f 100644
--- a/fanficfare/adapters/adapter_www13hoursorg.py
+++ b/fanficfare/adapters/adapter_www13hoursorg.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_wwwaneroticstorycom.py b/fanficfare/adapters/adapter_wwwaneroticstorycom.py
index 9743106b..4e6414e7 100644
--- a/fanficfare/adapters/adapter_wwwaneroticstorycom.py
+++ b/fanficfare/adapters/adapter_wwwaneroticstorycom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2013 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2013 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,11 +20,13 @@ import logging
import os
import re
import sys
-import urllib2
-import urlparse
-
from bs4.element import Comment
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
@@ -94,7 +96,7 @@ class WWWAnEroticStoryComAdapter(BaseSiteAdapter):
#strip comments and scripts from soup
[comment.extract() for comment in soup1.find_all(text=lambda text:isinstance(text, Comment))]
[script.extract() for script in soup1.find_all('script')]
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_wwwarea52hkhnet.py b/fanficfare/adapters/adapter_wwwarea52hkhnet.py
index b0133952..fccee9c4 100644
--- a/fanficfare/adapters/adapter_wwwarea52hkhnet.py
+++ b/fanficfare/adapters/adapter_wwwarea52hkhnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -28,11 +28,13 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
-
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -96,7 +98,7 @@ class WWWArea52HKHNetAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
diff --git a/fanficfare/adapters/adapter_wwwgiantessworldnet.py b/fanficfare/adapters/adapter_wwwgiantessworldnet.py
index 9945e205..b44938c3 100644
--- a/fanficfare/adapters/adapter_wwwgiantessworldnet.py
+++ b/fanficfare/adapters/adapter_wwwgiantessworldnet.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2016 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/adapter_wwwlushstoriescom.py b/fanficfare/adapters/adapter_wwwlushstoriescom.py
index 038b7e8f..36ac32fc 100644
--- a/fanficfare/adapters/adapter_wwwlushstoriescom.py
+++ b/fanficfare/adapters/adapter_wwwlushstoriescom.py
@@ -24,12 +24,16 @@ from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
from bs4 import Comment, BeautifulSoup
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+from ..six.moves.urllib.parse import quote
+
from .base_adapter import BaseSiteAdapter, makeDate
####################################################################################################
@@ -57,12 +61,12 @@ class WWWLushStoriesComAdapter(BaseSiteAdapter): # XXX
if '%' not in storyId:
## assume already escaped if contains %. Assume needs escaping if it doesn't.
try:
- storyId = urllib2.quote(storyId)
+ storyId = quote(storyId)
except KeyError:
## string from calibre is utf8, but lushstories.com
## expects extended chars to be in latin1 / iso-8859-1
## rather than utf8.
- storyId = urllib2.quote(storyId.encode("iso-8859-1"))
+ storyId = quote(storyId.encode("iso-8859-1"))
self.story.setMetadata('storyId',storyId)
@@ -119,7 +123,7 @@ class WWWLushStoriesComAdapter(BaseSiteAdapter): # XXX
'''
try:
page_data = self._fetchUrl(page)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist('404 error: {}'.format(page))
else:
@@ -173,7 +177,7 @@ class WWWLushStoriesComAdapter(BaseSiteAdapter): # XXX
authorurl = self.story.getMetadata('authorUrl')
try:
adata = self._fetchUrl(authorurl)
- except (urllib2.HTTPError) as e:
+ except (HTTPError) as e:
## Can't get the author's page, so we use what is on the story page
tags = soup.find('div',{'id':'storytags'}).find('a')
if tags:
@@ -207,7 +211,7 @@ class WWWLushStoriesComAdapter(BaseSiteAdapter): # XXX
for story in asoup.findAll('div',{'class':'entrycontent'}):
for link in story.find_all('a'):
if '/stories/' in link['href']:
- linkh = urllib2.quote(link['href'].encode('utf-8', 'ignore'))
+ linkh = quote(link['href'].encode('utf-8', 'ignore'))
linkh = linkh.replace('%3A', ':')
# print self.url
# print linkh
diff --git a/fanficfare/adapters/adapter_wwwnovelallcom.py b/fanficfare/adapters/adapter_wwwnovelallcom.py
index 00e58f8d..d61e3593 100644
--- a/fanficfare/adapters/adapter_wwwnovelallcom.py
+++ b/fanficfare/adapters/adapter_wwwnovelallcom.py
@@ -23,8 +23,10 @@ from __future__ import absolute_import
import logging
import re
import json
-import urllib2
-import urlparse
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
@@ -109,7 +111,7 @@ class WWWNovelAllComAdapter(BaseSiteAdapter):
try:
data = self._fetchUrl(url)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist('404 error: {}'.format(url))
else:
diff --git a/fanficfare/adapters/adapter_wwwutopiastoriescom.py b/fanficfare/adapters/adapter_wwwutopiastoriescom.py
index 8226e970..fab7711b 100644
--- a/fanficfare/adapters/adapter_wwwutopiastoriescom.py
+++ b/fanficfare/adapters/adapter_wwwutopiastoriescom.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2012 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2012 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -32,13 +32,18 @@ We get the category from the author's page
import logging
logger = logging.getLogger(__name__)
import re
-import urllib2
import sys
from bs4.element import Comment
+
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
+# py2 vs py3 transition
+from ..six import text_type as unicode
+from ..six.moves.urllib.error import HTTPError
+from ..six.moves.urllib.parse import quote
+
from .base_adapter import BaseSiteAdapter, makeDate
def getClass():
@@ -95,7 +100,7 @@ class WWWUtopiastoriesComAdapter(BaseSiteAdapter):
'''
try:
page_data = self._fetchUrl(page)
- except urllib2.HTTPError, e:
+ except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist('404 error: {}'.format(page))
else:
@@ -155,7 +160,7 @@ class WWWUtopiastoriesComAdapter(BaseSiteAdapter):
else:
self.story.setMetadata('authorId',a['href'].split('/')[2])
self.story.setMetadata('author',a.string)
- self.story.setMetadata('authorUrl','http://'+self.host+urllib2.quote(
+ self.story.setMetadata('authorUrl','http://'+self.host+quote(
a['href'].encode('UTF-8')))
elif 'Story Codes' in heading:
self.story.setMetadata('eroticatags',text.replace('Story Codes - ',''))
diff --git a/fanficfare/adapters/base_adapter.py b/fanficfare/adapters/base_adapter.py
index 6c90520a..fb7e02c1 100644
--- a/fanficfare/adapters/base_adapter.py
+++ b/fanficfare/adapters/base_adapter.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/adapters/base_xenforoforum_adapter.py b/fanficfare/adapters/base_xenforoforum_adapter.py
index b5549495..0773b7c0 100644
--- a/fanficfare/adapters/base_xenforoforum_adapter.py
+++ b/fanficfare/adapters/base_xenforoforum_adapter.py
@@ -21,13 +21,13 @@ logger = logging.getLogger(__name__)
import re
from xml.dom.minidom import parseString
+from ..htmlcleanup import stripHTML
+from .. import exceptions as exceptions
+
# py2 vs py3 transition
from ..six import text_type as unicode
from ..six.moves.urllib.error import HTTPError
-from ..htmlcleanup import stripHTML
-from .. import exceptions as exceptions
-
from .base_adapter import BaseSiteAdapter, makeDate
logger = logging.getLogger(__name__)
diff --git a/fanficfare/dateutils.py b/fanficfare/dateutils.py
index 2716e253..1caf0a3f 100644
--- a/fanficfare/dateutils.py
+++ b/fanficfare/dateutils.py
@@ -22,7 +22,7 @@ from datetime import datetime, timedelta
import logging
logger = logging.getLogger(__name__)
-UNIX_EPOCHE = datetime.fromtimestamp(0)
+UNIX_EPOCHE = datetime.fromtimestamp(86400)
## Currently used by adapter_webnovelcom & adapter_wwwnovelallcom
diff --git a/fanficfare/geturls.py b/fanficfare/geturls.py
index d0175830..de0fff41 100644
--- a/fanficfare/geturls.py
+++ b/fanficfare/geturls.py
@@ -20,11 +20,11 @@ import collections
import email
import imaplib
import re
+
+# unicode in py2, str in py3
from .six.moves.urllib.request import (build_opener, HTTPCookieProcessor)
from .six.moves.urllib.parse import (urlparse, urlunparse)
-# unicode in py2, str in py3
from .six import text_type as unicode
-
from .six import ensure_str
import logging
diff --git a/fanficfare/writers/__init__.py b/fanficfare/writers/__init__.py
index 0d378170..f1cb1325 100644
--- a/fanficfare/writers/__init__.py
+++ b/fanficfare/writers/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2011 Fanficdownloader team, 2015 FanFicFare team
+# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
From a38621d66fdea1492f88328789489534f878cd79 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Tue, 31 Jul 2018 19:16:02 -0500
Subject: [PATCH 029/120] Comment out pickle.
---
fanficfare/configurable.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/fanficfare/configurable.py b/fanficfare/configurable.py
index dd572dca..22a6fcf2 100644
--- a/fanficfare/configurable.py
+++ b/fanficfare/configurable.py
@@ -34,7 +34,7 @@ from .six import string_types as basestring
import time
import logging
import sys
-import pickle
+# import pickle
try:
from google.appengine.api import apiproxy_stub_map
From de9d49c0fc48de071bf9df38656f74a9d22f7b1e Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 1 Aug 2018 11:20:36 -0500
Subject: [PATCH 030/120] More py2/py3
---
fanficfare/adapters/adapter_adultfanfictionorg.py | 7 ++++---
fanficfare/adapters/adapter_fireflyfansnet.py | 3 ++-
fanficfare/adapters/adapter_fireflypopulliorg.py | 2 +-
fanficfare/adapters/adapter_lotrgficcom.py | 5 +++--
fanficfare/adapters/adapter_tolkienfanfiction.py | 4 ++--
fanficfare/adapters/adapter_tomparisdormcom.py | 2 +-
fanficfare/adapters/adapter_wattpadcom.py | 4 ++--
.../adapters/adapter_writingwhimsicalwanderingsnet.py | 2 +-
fanficfare/adapters/adapter_wwwarea52hkhnet.py | 5 +++--
fanficfare/adapters/adapter_wwwutopiastoriescom.py | 5 +++--
fanficfare/configurable.py | 9 ++++++---
fanficfare/writers/base_writer.py | 7 ++++---
12 files changed, 32 insertions(+), 23 deletions(-)
diff --git a/fanficfare/adapters/adapter_adultfanfictionorg.py b/fanficfare/adapters/adapter_adultfanfictionorg.py
index 80789adf..8baf8273 100644
--- a/fanficfare/adapters/adapter_adultfanfictionorg.py
+++ b/fanficfare/adapters/adapter_adultfanfictionorg.py
@@ -30,6 +30,7 @@ from .. import exceptions as exceptions
# py2 vs py3 transition
from ..six import text_type as unicode
+from ..six import ensure_text
from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
@@ -235,7 +236,7 @@ class AdultFanFictionOrgAdapter(BaseSiteAdapter):
# Find the chapters:
chapters = soup.find('div',{'class':'dropdown-content'})
for i, chapter in enumerate(chapters.findAll('a')):
- self.add_chapter(chapter,self.url+'&chapter='+str(i+1))
+ self.add_chapter(chapter,self.url+'&chapter='+unicode(i+1))
# Find authorid and URL from... author url.
@@ -302,7 +303,7 @@ class AdultFanFictionOrgAdapter(BaseSiteAdapter):
while i == 0:
##We already have the first page, so if this is the first time through, skip getting the page
if page != 1:
- author_Url = '{0}&view=story&zone={1}&page={2}'.format(self.story.getMetadata('authorUrl'), self.zone, str(page))
+ author_Url = '{0}&view=story&zone={1}&page={2}'.format(self.story.getMetadata('authorUrl'), self.zone, unicode(page))
logger.debug('Getting the author page: {0}'.format(author_Url))
try:
adata = self._fetchUrl(author_Url)
@@ -337,7 +338,7 @@ class AdultFanFictionOrgAdapter(BaseSiteAdapter):
##There is also a double , so we have to fix that, then remove the leading and trailing '-:-'.
##They are always in the same order.
## EDIT 09/26/2016: Had some trouble with unicode errors... so I had to put in the decode/encode parts to fix it
- liMetadata = str(lc2).decode('utf-8').replace('\n','').replace('\r','').replace('\t',' ').replace(' ',' ').replace(' ',' ').replace(' ',' ')
+ liMetadata = ensure_text(lc2).replace('\n','').replace('\r','').replace('\t',' ').replace(' ',' ').replace(' ',' ').replace(' ',' ')
liMetadata = stripHTML(liMetadata.replace(r' ','-:-').replace('','-:-'))
liMetadata = liMetadata.strip('-:-').strip('-:-').encode('utf-8')
for i, value in enumerate(liMetadata.decode('utf-8').split('-:-')):
diff --git a/fanficfare/adapters/adapter_fireflyfansnet.py b/fanficfare/adapters/adapter_fireflyfansnet.py
index 989e5b4b..b5a6676a 100644
--- a/fanficfare/adapters/adapter_fireflyfansnet.py
+++ b/fanficfare/adapters/adapter_fireflyfansnet.py
@@ -24,6 +24,7 @@ import re
import sys
# py2 vs py3 transition
from ..six import text_type as unicode
+from ..six import ensure_text
from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
@@ -133,7 +134,7 @@ class FireFlyFansNetSiteAdapter(BaseSiteAdapter):
# which is usualy FireFly on this site, but I'm going to get them
# anyway.a
category = soup.find('span', {'id': 'MainContent_txtItemDetails'})
- category = stripHTML(str(category).replace(b"\xc2\xa0", ' '))
+ category = stripHTML(ensure_text(category).replace(b"\xc2\xa0", ' '))
metad = category.split(' ')
for meta in metad:
if ":" in meta:
diff --git a/fanficfare/adapters/adapter_fireflypopulliorg.py b/fanficfare/adapters/adapter_fireflypopulliorg.py
index 50f6cf40..a49b8abc 100644
--- a/fanficfare/adapters/adapter_fireflypopulliorg.py
+++ b/fanficfare/adapters/adapter_fireflypopulliorg.py
@@ -269,7 +269,7 @@ class FireflyPopulliOrgSiteAdapter(BaseSiteAdapter):
else:
## This should catch anything else, and shouldn't ever really be gotten
# to, but I'm going to have it print out in the debugger, just in case
- logger.debug('Metadata not caught: %s' % str(meta))
+ logger.debug('Metadata not caught: %s' % unicode(meta))
zzzzzzzz = 0
elif label == 'characters':
self.story.setMetadata('characters', value)
diff --git a/fanficfare/adapters/adapter_lotrgficcom.py b/fanficfare/adapters/adapter_lotrgficcom.py
index ffda5d6b..3ac0cc06 100644
--- a/fanficfare/adapters/adapter_lotrgficcom.py
+++ b/fanficfare/adapters/adapter_lotrgficcom.py
@@ -28,6 +28,7 @@ from .. import exceptions as exceptions
# py2 vs py3 transition
from ..six import text_type as unicode
+from ..six import ensure_text
from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
@@ -255,7 +256,7 @@ class LOTRgficComAdapter(BaseSiteAdapter):
## dedicated tag, so we have to split some hairs..
## This may not work every time... but I tested it with 6 stories...
mdata = metad[0]
- while '' not in str(mdata.nextSibling):
+ while '' not in unicode(mdata.nextSibling):
mdata = mdata.nextSibling
self.setDescription(url,mdata.previousSibling.previousSibling.get_text())
@@ -286,7 +287,7 @@ class LOTRgficComAdapter(BaseSiteAdapter):
#
#
## we'll have to remove the non-breaking spaces to get this to work.
- metad = str(metad).replace(b"\xc2\xa0",'').replace('\n','')
+ metad = ensure_text(metad).replace(b"\xc2\xa0",'').replace('\n','')
for txt in metad.split(' '):
if 'Challenges:' in txt:
txt = txt.replace('Challenges:','').strip()
diff --git a/fanficfare/adapters/adapter_tolkienfanfiction.py b/fanficfare/adapters/adapter_tolkienfanfiction.py
index de09296b..f2230189 100644
--- a/fanficfare/adapters/adapter_tolkienfanfiction.py
+++ b/fanficfare/adapters/adapter_tolkienfanfiction.py
@@ -67,7 +67,7 @@ from .. import exceptions as exceptions
# py2 vs py3 transition
from ..six import text_type as unicode
-from ..six.moves.urllib import parse as urlparse
+from ..six.moves.urllib.parse import urlencode
from ..six.moves.urllib.error import HTTPError
from ..six.moves import urllib
@@ -196,7 +196,7 @@ class TolkienFanfictionAdapter(BaseSiteAdapter):
logger.debug('Title as `str`: ' + unicode(title))
# For publication date we need to search
try:
- queryString = urllib.urlencode((
+ queryString = urlencode((
('type', 3),
('field', 1),
# need translate here for the weird accented letters
diff --git a/fanficfare/adapters/adapter_tomparisdormcom.py b/fanficfare/adapters/adapter_tomparisdormcom.py
index 169c757a..8da180f6 100644
--- a/fanficfare/adapters/adapter_tomparisdormcom.py
+++ b/fanficfare/adapters/adapter_tomparisdormcom.py
@@ -123,7 +123,7 @@ class TomParisDormComAdapter(BaseSiteAdapter):
# Get the rest of the Metadata
mdsoup = soup.find('div',{'id' : 'output'})
- mdstr = str(mdsoup).replace('\n','').replace('\r','').replace('\t',' ').replace(' ',' ').replace(' ',' ').replace(' ',' ')
+ mdstr = unicode(mdsoup).replace('\n','').replace('\r','').replace('\t',' ').replace(' ',' ').replace(' ',' ').replace(' ',' ')
mdstr = stripHTML(mdstr.replace(r' ',r'-:-').replace('|','-:-'))
mdstr = mdstr.replace(r'[Rev',r'-:-[Rev').replace(' -:- ','-:-').strip('-:-').strip('-:-')
diff --git a/fanficfare/adapters/adapter_wattpadcom.py b/fanficfare/adapters/adapter_wattpadcom.py
index 2e6041e1..1c5ed295 100644
--- a/fanficfare/adapters/adapter_wattpadcom.py
+++ b/fanficfare/adapters/adapter_wattpadcom.py
@@ -122,8 +122,8 @@ class WattpadComAdapter(BaseSiteAdapter):
# CATEGORIES
try:
- storyCategories = [WattpadComAdapter.CATEGORY_DEFs.get(str(c)) for c in storyInfo['categories'] if
- WattpadComAdapter.CATEGORY_DEFs.has_key(str(c))]
+ storyCategories = [WattpadComAdapter.CATEGORY_DEFs.get(unicode(c)) for c in storyInfo['categories'] if
+ WattpadComAdapter.CATEGORY_DEFs.has_key(unicode(c))]
self.story.setMetadata('category', storyCategories[0])
self.story.setMetadata('tags', storyInfo['tags'])
diff --git a/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py b/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
index fe4f8274..5aa5205b 100644
--- a/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
+++ b/fanficfare/adapters/adapter_writingwhimsicalwanderingsnet.py
@@ -152,7 +152,7 @@ class WritingWhimsicalwanderingsNetAdapter(BaseSiteAdapter):
## I know I'm replacing alot of 's here, but I want to make sure that they are all
## the same, so we can split the string correctly.
metad = soup.find('div',{'class':'listbox'})
- metad = str(metad.renderContents()).replace('\n',' ').replace(' ','|||||||').replace(' ','|||||||').replace(' ','|||||||').strip()
+ metad = unicode(metad.renderContents()).replace('\n',' ').replace(' ','|||||||').replace(' ','|||||||').replace(' ','|||||||').strip()
while '||||||||' in metad:
metad = metad.replace('||||||||','|||||||')
metad = stripHTML(metad)
diff --git a/fanficfare/adapters/adapter_wwwarea52hkhnet.py b/fanficfare/adapters/adapter_wwwarea52hkhnet.py
index fccee9c4..8dce5df5 100644
--- a/fanficfare/adapters/adapter_wwwarea52hkhnet.py
+++ b/fanficfare/adapters/adapter_wwwarea52hkhnet.py
@@ -33,6 +33,7 @@ from .. import exceptions as exceptions
# py2 vs py3 transition
from ..six import text_type as unicode
+from ..six import ensure_text
from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
@@ -191,7 +192,7 @@ class WWWArea52HKHNetAdapter(BaseSiteAdapter):
## I've seen a non-breaking space in some of the storyblocks
## so we are going to remove them.
- series = stripHTML(str(series.renderContents()).replace(b"\xc2\xa0",'')).strip()
+ series = stripHTML(ensure_text(series.renderContents()).replace(b"\xc2\xa0",'')).strip()
if len(series) > 0:
self.story.setMetadata('series',series)
@@ -230,7 +231,7 @@ class WWWArea52HKHNetAdapter(BaseSiteAdapter):
if not self.getConfig("keep_summary_html"):
value = stripHTML(value).replace('Summary:','').strip()
else:
- value = str(value).replace('Summary:','').strip()
+ value = unicode(value).replace('Summary:','').strip()
self.setDescription(url, value)
# grab the text for an individual chapter.
diff --git a/fanficfare/adapters/adapter_wwwutopiastoriescom.py b/fanficfare/adapters/adapter_wwwutopiastoriescom.py
index fab7711b..a62c8d8a 100644
--- a/fanficfare/adapters/adapter_wwwutopiastoriescom.py
+++ b/fanficfare/adapters/adapter_wwwutopiastoriescom.py
@@ -41,6 +41,7 @@ from .. import exceptions as exceptions
# py2 vs py3 transition
from ..six import text_type as unicode
+from ..six import ensure_text
from ..six.moves.urllib.error import HTTPError
from ..six.moves.urllib.parse import quote
@@ -147,12 +148,12 @@ class WWWUtopiastoriesComAdapter(BaseSiteAdapter):
for detail in soup.findAll('li'):
- det = str(detail).replace(b"\xc2\xa0",'')
+ det = ensure_text(detail).replace(b"\xc2\xa0",'')
heading = stripHTML(det).split(' - ')[0]
text = stripHTML(det).replace(heading+' - ','')
if 'Author' in heading:
a = detail.find('a')
- if 'mailto' in str(a):
+ if 'mailto' in unicode(a):
self.story.setMetadata('authorId','0000000000')
self.story.setMetadata('authorUrl',self.url)
self.story.setMetadata('author','Unknown')
diff --git a/fanficfare/configurable.py b/fanficfare/configurable.py
index 22a6fcf2..113c1ce4 100644
--- a/fanficfare/configurable.py
+++ b/fanficfare/configurable.py
@@ -25,11 +25,13 @@ from . import six
from .six.moves import configparser
from .six.moves.configparser import DEFAULTSECT, MissingSectionHeaderError, ParsingError
from .six.moves import urllib
+from .six.moves.urllib.parse import urlencode
from .six.moves.urllib.request import (build_opener, HTTPCookieProcessor)
from .six.moves.urllib.error import HTTPError
from .six.moves import http_cookiejar as cl
from .six import text_type as unicode
from .six import string_types as basestring
+from .six import ensure_binary
import time
import logging
@@ -672,7 +674,7 @@ class Configuration(configparser.SafeConfigParser):
vlist = re.split(r'(?
Date: Wed, 1 Aug 2018 11:57:05 -0500
Subject: [PATCH 031/120] Fix translit.
---
fanficfare/translit.py | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/fanficfare/translit.py b/fanficfare/translit.py
index 096ccff2..ec05d738 100644
--- a/fanficfare/translit.py
+++ b/fanficfare/translit.py
@@ -2,6 +2,10 @@
# Code taken from http://python.su/forum/viewtopic.php?pid=66946
from __future__ import absolute_import
+# py2 vs py3 transition
+from .six import text_type as unicode
+from .six import ensure_text
+
import unicodedata
def is_syllable(letter):
syllables = ("A", "E", "I", "O", "U", "a", "e", "i", "o", "u")
@@ -39,7 +43,7 @@ def romanize(letter):
return func(filter(is_consonant, unid))
def translit(text):
output = ""
- for letter in text:
+ for letter in ensure_text(text):
output += romanize(letter)
return output
#def main():
From f1d4f2f8bb8150b459662a8033607dd883dac040 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 1 Aug 2018 11:59:59 -0500
Subject: [PATCH 032/120] Fix Request for POST
---
fanficfare/configurable.py | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/fanficfare/configurable.py b/fanficfare/configurable.py
index 113c1ce4..e9f722f2 100644
--- a/fanficfare/configurable.py
+++ b/fanficfare/configurable.py
@@ -26,7 +26,7 @@ from .six.moves import configparser
from .six.moves.configparser import DEFAULTSECT, MissingSectionHeaderError, ParsingError
from .six.moves import urllib
from .six.moves.urllib.parse import urlencode
-from .six.moves.urllib.request import (build_opener, HTTPCookieProcessor)
+from .six.moves.urllib.request import (build_opener, HTTPCookieProcessor, Request)
from .six.moves.urllib.error import HTTPError
from .six.moves import http_cookiejar as cl
from .six import text_type as unicode
@@ -1023,15 +1023,15 @@ class Configuration(configparser.SafeConfigParser):
logger.debug("#####################################\npagecache(POST) MISS: %s"%safe_url(cachekey))
self.do_sleep(extrasleep)
- ## urllib.Request assumes POST when data!=None. Also assumes data
+ ## Request assumes POST when data!=None. Also assumes data
## is application/x-www-form-urlencoded.
if 'Content-type' not in headers:
headers['Content-type']='application/x-www-form-urlencoded'
if 'Accept' not in headers:
headers['Accept']="text/html,*/*"
- req = urllib.Request(url,
- data=urlencode(parameters),
- headers=headers)
+ req = Request(url,
+ data=urlencode(parameters),
+ headers=headers)
## Specific UA because too many sites are blocking the default python UA.
self.opener.addheaders = [('User-Agent', self.getConfig('user_agent')),
From 5179a2cd23c4adcdf9632f031d93a90860a7625c Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 1 Aug 2018 12:03:01 -0500
Subject: [PATCH 033/120] Document fromtimestamp(86400)
---
fanficfare/adapters/adapter_inkbunnynet.py | 3 +--
fanficfare/dateutils.py | 2 ++
2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/fanficfare/adapters/adapter_inkbunnynet.py b/fanficfare/adapters/adapter_inkbunnynet.py
index 1b6c2c5b..03f9f2e9 100644
--- a/fanficfare/adapters/adapter_inkbunnynet.py
+++ b/fanficfare/adapters/adapter_inkbunnynet.py
@@ -31,11 +31,10 @@ from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
from ..htmlcleanup import stripHTML
+from ..dateutils import UNIX_EPOCHE
-UNIX_EPOCHE = datetime.fromtimestamp(86400)
logger = logging.getLogger(__name__)
-
def getClass():
return InkBunnyNetSiteAdapter
diff --git a/fanficfare/dateutils.py b/fanficfare/dateutils.py
index 1caf0a3f..3fba84e3 100644
--- a/fanficfare/dateutils.py
+++ b/fanficfare/dateutils.py
@@ -22,6 +22,8 @@ from datetime import datetime, timedelta
import logging
logger = logging.getLogger(__name__)
+## There's a windows / py3 bug that prevents using 0.
+## So Jan 2, 1970 instead.
UNIX_EPOCHE = datetime.fromtimestamp(86400)
## Currently used by adapter_webnovelcom & adapter_wwwnovelallcom
From 61c3af67e1fa92a954af2346297e0707b93cf4f0 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 1 Aug 2018 12:14:43 -0500
Subject: [PATCH 034/120] Fix for offsetting unicode in mediaminer.org title
---
fanficfare/adapters/adapter_mediaminerorg.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/fanficfare/adapters/adapter_mediaminerorg.py b/fanficfare/adapters/adapter_mediaminerorg.py
index c23c1326..291542f5 100644
--- a/fanficfare/adapters/adapter_mediaminerorg.py
+++ b/fanficfare/adapters/adapter_mediaminerorg.py
@@ -122,7 +122,7 @@ class MediaMinerOrgSiteAdapter(BaseSiteAdapter):
## title:
##
A, A' Fan Fiction ❯ Mmmmm
- titletext = stripHTML(soup.find("h1",{"id":"post-title"}))
+ titletext = unicode(stripHTML(soup.find("h1",{"id":"post-title"})))
titletext = titletext[titletext.index(u'❯')+2:]
# print("title:(%s)"%titletext)
self.story.setMetadata('title',titletext)
From d43b90642f28f441f5a068fccb0b13cc28c29d49 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 1 Aug 2018 12:55:45 -0500
Subject: [PATCH 035/120] Fixes for encoding/make unicode issues.
---
fanficfare/adapters/adapter_adultfanfictionorg.py | 3 +--
fanficfare/adapters/adapter_fireflyfansnet.py | 3 +--
fanficfare/adapters/adapter_lotrgficcom.py | 3 +--
fanficfare/adapters/adapter_wwwarea52hkhnet.py | 3 +--
fanficfare/adapters/adapter_wwwutopiastoriescom.py | 3 +--
5 files changed, 5 insertions(+), 10 deletions(-)
diff --git a/fanficfare/adapters/adapter_adultfanfictionorg.py b/fanficfare/adapters/adapter_adultfanfictionorg.py
index 8baf8273..4acb861b 100644
--- a/fanficfare/adapters/adapter_adultfanfictionorg.py
+++ b/fanficfare/adapters/adapter_adultfanfictionorg.py
@@ -30,7 +30,6 @@ from .. import exceptions as exceptions
# py2 vs py3 transition
from ..six import text_type as unicode
-from ..six import ensure_text
from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
@@ -338,7 +337,7 @@ class AdultFanFictionOrgAdapter(BaseSiteAdapter):
##There is also a double , so we have to fix that, then remove the leading and trailing '-:-'.
##They are always in the same order.
## EDIT 09/26/2016: Had some trouble with unicode errors... so I had to put in the decode/encode parts to fix it
- liMetadata = ensure_text(lc2).replace('\n','').replace('\r','').replace('\t',' ').replace(' ',' ').replace(' ',' ').replace(' ',' ')
+ liMetadata = unicode(lc2).replace('\n','').replace('\r','').replace('\t',' ').replace(' ',' ').replace(' ',' ').replace(' ',' ')
liMetadata = stripHTML(liMetadata.replace(r' ','-:-').replace('','-:-'))
liMetadata = liMetadata.strip('-:-').strip('-:-').encode('utf-8')
for i, value in enumerate(liMetadata.decode('utf-8').split('-:-')):
diff --git a/fanficfare/adapters/adapter_fireflyfansnet.py b/fanficfare/adapters/adapter_fireflyfansnet.py
index b5a6676a..a6abba78 100644
--- a/fanficfare/adapters/adapter_fireflyfansnet.py
+++ b/fanficfare/adapters/adapter_fireflyfansnet.py
@@ -24,7 +24,6 @@ import re
import sys
# py2 vs py3 transition
from ..six import text_type as unicode
-from ..six import ensure_text
from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
@@ -134,7 +133,7 @@ class FireFlyFansNetSiteAdapter(BaseSiteAdapter):
# which is usualy FireFly on this site, but I'm going to get them
# anyway.a
category = soup.find('span', {'id': 'MainContent_txtItemDetails'})
- category = stripHTML(ensure_text(category).replace(b"\xc2\xa0", ' '))
+ category = stripHTML(unicode(category).replace(u"\xc2\xa0", ' '))
metad = category.split(' ')
for meta in metad:
if ":" in meta:
diff --git a/fanficfare/adapters/adapter_lotrgficcom.py b/fanficfare/adapters/adapter_lotrgficcom.py
index 3ac0cc06..d1e32d81 100644
--- a/fanficfare/adapters/adapter_lotrgficcom.py
+++ b/fanficfare/adapters/adapter_lotrgficcom.py
@@ -28,7 +28,6 @@ from .. import exceptions as exceptions
# py2 vs py3 transition
from ..six import text_type as unicode
-from ..six import ensure_text
from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
@@ -287,7 +286,7 @@ class LOTRgficComAdapter(BaseSiteAdapter):
#
#
## we'll have to remove the non-breaking spaces to get this to work.
- metad = ensure_text(metad).replace(b"\xc2\xa0",'').replace('\n','')
+ metad = unicode(metad).replace(u"\xc2\xa0",'').replace('\n','')
for txt in metad.split(' '):
if 'Challenges:' in txt:
txt = txt.replace('Challenges:','').strip()
diff --git a/fanficfare/adapters/adapter_wwwarea52hkhnet.py b/fanficfare/adapters/adapter_wwwarea52hkhnet.py
index 8dce5df5..b49503f1 100644
--- a/fanficfare/adapters/adapter_wwwarea52hkhnet.py
+++ b/fanficfare/adapters/adapter_wwwarea52hkhnet.py
@@ -33,7 +33,6 @@ from .. import exceptions as exceptions
# py2 vs py3 transition
from ..six import text_type as unicode
-from ..six import ensure_text
from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
@@ -192,7 +191,7 @@ class WWWArea52HKHNetAdapter(BaseSiteAdapter):
## I've seen a non-breaking space in some of the storyblocks
## so we are going to remove them.
- series = stripHTML(ensure_text(series.renderContents()).replace(b"\xc2\xa0",'')).strip()
+ series = stripHTML(unicode(series.renderContents()).replace(u"\xc2\xa0",'')).strip()
if len(series) > 0:
self.story.setMetadata('series',series)
diff --git a/fanficfare/adapters/adapter_wwwutopiastoriescom.py b/fanficfare/adapters/adapter_wwwutopiastoriescom.py
index a62c8d8a..1f7882ef 100644
--- a/fanficfare/adapters/adapter_wwwutopiastoriescom.py
+++ b/fanficfare/adapters/adapter_wwwutopiastoriescom.py
@@ -41,7 +41,6 @@ from .. import exceptions as exceptions
# py2 vs py3 transition
from ..six import text_type as unicode
-from ..six import ensure_text
from ..six.moves.urllib.error import HTTPError
from ..six.moves.urllib.parse import quote
@@ -148,7 +147,7 @@ class WWWUtopiastoriesComAdapter(BaseSiteAdapter):
for detail in soup.findAll('li'):
- det = ensure_text(detail).replace(b"\xc2\xa0",'')
+ det = unicode(detail).replace(u"\xc2\xa0",'')
heading = stripHTML(det).split(' - ')[0]
text = stripHTML(det).replace(heading+' - ','')
if 'Author' in heading:
From 49f78457ee500e08e0475d4c6febc6a74f96b6ce Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 1 Aug 2018 15:01:13 -0500
Subject: [PATCH 036/120] Strip \xa0 in stripHTML()--this may need
better placement.
---
fanficfare/adapters/adapter_test1.py | 2 +-
fanficfare/htmlcleanup.py | 7 +++++--
2 files changed, 6 insertions(+), 3 deletions(-)
diff --git a/fanficfare/adapters/adapter_test1.py b/fanficfare/adapters/adapter_test1.py
index 7d0f02b4..5e25f864 100644
--- a/fanficfare/adapters/adapter_test1.py
+++ b/fanficfare/adapters/adapter_test1.py
@@ -113,7 +113,7 @@ class TestSiteAdapter(BaseSiteAdapter):
raise exceptions.FailedToLogin(self.url,self.username)
if idstr == '664':
- self.story.setMetadata(u'title',"Test Story Title "+idstr+self.crazystring)
+ self.story.setMetadata(u'title',"Test Story Title "+idstr+self.crazystring+" ")
self.story.setMetadata('author','Test Author aa bare amp(&) quote(') amp(&)')
else:
self.story.setMetadata(u'title',"Test Story Title "+idstr)
diff --git a/fanficfare/htmlcleanup.py b/fanficfare/htmlcleanup.py
index 44ee5983..285e2882 100644
--- a/fanficfare/htmlcleanup.py
+++ b/fanficfare/htmlcleanup.py
@@ -63,10 +63,13 @@ def _replaceNotEntities(data):
def stripHTML(soup):
if isinstance(soup,basestring) or hasattr(soup, 'bs3'):
- return removeAllEntities(re.sub(r'<[^>]+>','',"%s" % soup)).strip()
+ retval = removeAllEntities(re.sub(r'<[^>]+>','',"%s" % soup)).strip()
else:
# bs4 already converts all the entities to UTF8 chars.
- return soup.get_text(strip=True)
+ retval = soup.get_text(strip=True)
+ # some change in the python3 branch started making '\xa0'
+ # instead of ' '
+ return retval.strip(u'\xa0')
def conditionalRemoveEntities(value):
if isinstance(value,basestring):
From 01c836f2366e4d0dfa777d1ceb32a63d83dc47eb Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 1 Aug 2018 17:46:17 -0500
Subject: [PATCH 037/120] More stripping \xa0 in adapters.
---
fanficfare/adapters/adapter_fireflyfansnet.py | 2 +-
fanficfare/adapters/adapter_fireflypopulliorg.py | 4 ++--
fanficfare/adapters/adapter_lotrgficcom.py | 2 +-
fanficfare/adapters/adapter_wwwarea52hkhnet.py | 2 +-
fanficfare/adapters/adapter_wwwutopiastoriescom.py | 2 +-
5 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/fanficfare/adapters/adapter_fireflyfansnet.py b/fanficfare/adapters/adapter_fireflyfansnet.py
index a6abba78..160815d7 100644
--- a/fanficfare/adapters/adapter_fireflyfansnet.py
+++ b/fanficfare/adapters/adapter_fireflyfansnet.py
@@ -133,7 +133,7 @@ class FireFlyFansNetSiteAdapter(BaseSiteAdapter):
# which is usualy FireFly on this site, but I'm going to get them
# anyway.a
category = soup.find('span', {'id': 'MainContent_txtItemDetails'})
- category = stripHTML(unicode(category).replace(u"\xc2\xa0", ' '))
+ category = stripHTML(unicode(category).replace(u"\xa0", u' '))
metad = category.split(' ')
for meta in metad:
if ":" in meta:
diff --git a/fanficfare/adapters/adapter_fireflypopulliorg.py b/fanficfare/adapters/adapter_fireflypopulliorg.py
index a49b8abc..07539ed1 100644
--- a/fanficfare/adapters/adapter_fireflypopulliorg.py
+++ b/fanficfare/adapters/adapter_fireflypopulliorg.py
@@ -186,7 +186,7 @@ class FireflyPopulliOrgSiteAdapter(BaseSiteAdapter):
# There alot of nbsp's (non broken spaces) in here, so I'm going to remove them
# I'm also getting rid of the bold tags and the nextline characters to make it
# easier to get the information below
- story = repr(story).replace(b'\\xa0', '').replace(' ',' ').replace(
+ story = repr(story).replace(u'\\xa0', '').replace(' ',' ').replace(
'','').replace('','').replace(r'\n','')
story = self.make_soup(story).find('p')
story_a = story.find('a')
@@ -319,7 +319,7 @@ class FireflyPopulliOrgSiteAdapter(BaseSiteAdapter):
# the end of the section, which has alot of extraneous things, then adding my own div
# wrapper, recreating the soup, then getting that div from the soup again, before sending to
# the writers.
- story = repr(story).replace(b'\\xa0', '').replace(' ',' ').replace(r'\n','').strip()
+ story = repr(story).replace(u'\\xa0', '').replace(' ',' ').replace(r'\n','').strip()
story = story[12:]
story = story[:story.find('
Please <')]
story = '
' + story + '
'
diff --git a/fanficfare/adapters/adapter_lotrgficcom.py b/fanficfare/adapters/adapter_lotrgficcom.py
index d1e32d81..5d75e9fc 100644
--- a/fanficfare/adapters/adapter_lotrgficcom.py
+++ b/fanficfare/adapters/adapter_lotrgficcom.py
@@ -286,7 +286,7 @@ class LOTRgficComAdapter(BaseSiteAdapter):
#
#
## we'll have to remove the non-breaking spaces to get this to work.
- metad = unicode(metad).replace(u"\xc2\xa0",'').replace('\n','')
+ metad = unicode(metad).replace(u"\xa0",'').replace('\n','')
for txt in metad.split(' '):
if 'Challenges:' in txt:
txt = txt.replace('Challenges:','').strip()
diff --git a/fanficfare/adapters/adapter_wwwarea52hkhnet.py b/fanficfare/adapters/adapter_wwwarea52hkhnet.py
index b49503f1..b83d06d4 100644
--- a/fanficfare/adapters/adapter_wwwarea52hkhnet.py
+++ b/fanficfare/adapters/adapter_wwwarea52hkhnet.py
@@ -191,7 +191,7 @@ class WWWArea52HKHNetAdapter(BaseSiteAdapter):
## I've seen a non-breaking space in some of the storyblocks
## so we are going to remove them.
- series = stripHTML(unicode(series.renderContents()).replace(u"\xc2\xa0",'')).strip()
+ series = stripHTML(unicode(series.renderContents()).replace(u"\xa0",'')).strip()
if len(series) > 0:
self.story.setMetadata('series',series)
diff --git a/fanficfare/adapters/adapter_wwwutopiastoriescom.py b/fanficfare/adapters/adapter_wwwutopiastoriescom.py
index 1f7882ef..cad6b565 100644
--- a/fanficfare/adapters/adapter_wwwutopiastoriescom.py
+++ b/fanficfare/adapters/adapter_wwwutopiastoriescom.py
@@ -147,7 +147,7 @@ class WWWUtopiastoriesComAdapter(BaseSiteAdapter):
for detail in soup.findAll('li'):
- det = unicode(detail).replace(u"\xc2\xa0",'')
+ det = unicode(detail).replace(u"\xa0",'')
heading = stripHTML(det).split(' - ')[0]
text = stripHTML(det).replace(heading+' - ','')
if 'Author' in heading:
From 17aca1bb718ef3ba2bab07576a50e3c183ba18c6 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 1 Aug 2018 19:12:13 -0500
Subject: [PATCH 038/120] more py2/py3 fixes
---
fanficfare/adapters/adapter_fanfichu.py | 5 +++--
fanficfare/adapters/adapter_literotica.py | 2 +-
fanficfare/adapters/adapter_masseffect2in.py | 4 +++-
fanficfare/adapters/adapter_test1.py | 7 +++++--
fanficfare/configurable.py | 2 +-
5 files changed, 13 insertions(+), 7 deletions(-)
diff --git a/fanficfare/adapters/adapter_fanfichu.py b/fanficfare/adapters/adapter_fanfichu.py
index 293077a3..b095402c 100644
--- a/fanficfare/adapters/adapter_fanfichu.py
+++ b/fanficfare/adapters/adapter_fanfichu.py
@@ -19,6 +19,7 @@ from __future__ import absolute_import
import re
# py2 vs py3 transition
from ..six import text_type as unicode
+from ..six import ensure_binary
from ..six.moves.urllib import parse as urlparse
from ..six.moves.urllib.error import HTTPError
@@ -87,7 +88,7 @@ class FanficHuAdapter(BaseSiteAdapter):
def extractChapterUrlsAndMetadata(self):
soup = self._customized_fetch_url(self.url + '&i=1')
- if soup.title.string.encode(_SOURCE_CODE_ENCODING).strip(' :') == 'írta':
+ if soup.title.string.encode(_SOURCE_CODE_ENCODING).strip(b' :') == 'írta':
raise exceptions.StoryDoesNotExist(self.url)
chapter_options = soup.find('form', action='viewstory.php').select('option')
@@ -143,7 +144,7 @@ class FanficHuAdapter(BaseSiteAdapter):
while index < len(cells):
cell = cells[index]
- key = cell.b.string.encode(_SOURCE_CODE_ENCODING).strip(':')
+ key = cell.b.string.encode(_SOURCE_CODE_ENCODING).strip(b':')
try:
value = cells[index+1].string.encode(_SOURCE_CODE_ENCODING)
except AttributeError:
diff --git a/fanficfare/adapters/adapter_literotica.py b/fanficfare/adapters/adapter_literotica.py
index 0422ae75..e0ea5e3a 100644
--- a/fanficfare/adapters/adapter_literotica.py
+++ b/fanficfare/adapters/adapter_literotica.py
@@ -346,7 +346,7 @@ class LiteroticaSiteAdapter(BaseSiteAdapter):
chapter_description = '
Description: %s
' % chapter_description
fullhtml += self.getPageText(raw_page, url)
if pages:
- for page_no in xrange(2, len(page_nums) + 1):
+ for page_no in range(2, len(page_nums) + 1):
page_url = url + "?page=%s" % page_no
logger.debug("page_url= %s" % page_url)
raw_page = self._fetchUrl(page_url)
diff --git a/fanficfare/adapters/adapter_masseffect2in.py b/fanficfare/adapters/adapter_masseffect2in.py
index 3de5f4e7..99ae3f87 100644
--- a/fanficfare/adapters/adapter_masseffect2in.py
+++ b/fanficfare/adapters/adapter_masseffect2in.py
@@ -20,11 +20,14 @@ import bs4
import datetime
import logging
import re
+from itertools import takewhile
+
from ..htmlcleanup import removeEntities, stripHTML
from .. import exceptions as exceptions
# py2 vs py3 transition
from ..six import text_type as unicode
from ..six.moves.urllib.error import HTTPError
+from ..six.moves import zip as izip
from .base_adapter import BaseSiteAdapter, makeDate
@@ -703,7 +706,6 @@ def _getLargestCommonPrefix(*args):
"""Returns largest common prefix of all unicode arguments, ignoring case.
:rtype : unicode
"""
- from itertools import takewhile, izip
toLower = lambda xs: map(lambda x: x.lower(), xs)
allSame = lambda xs: len(set(toLower(xs))) == 1
return u''.join([i[0] for i in takewhile(allSame, izip(*args))])
diff --git a/fanficfare/adapters/adapter_test1.py b/fanficfare/adapters/adapter_test1.py
index 5e25f864..2a253ae0 100644
--- a/fanficfare/adapters/adapter_test1.py
+++ b/fanficfare/adapters/adapter_test1.py
@@ -23,6 +23,9 @@ logger = logging.getLogger(__name__)
from .. import exceptions
+# py2 vs py3 transition
+from ..six import ensure_text
+
from .base_adapter import BaseSiteAdapter, makeDate
class TestSiteAdapter(BaseSiteAdapter):
@@ -68,13 +71,13 @@ class TestSiteAdapter(BaseSiteAdapter):
#print("addList:%s"%(nkey))
for val in self.get_config_list(sections,key):
#print("addList:%s->%s"%(nkey,val))
- self.story.addToList(nkey,val.decode('utf-8').replace('{{storyId}}',idstr))
+ self.story.addToList(nkey,ensure_text(val).replace('{{storyId}}',idstr))
else:
# Special cases:
if key in ['datePublished','dateUpdated']:
self.story.setMetadata(key,makeDate(self.get_config(sections,key),"%Y-%m-%d"))
else:
- self.story.setMetadata(key,self.get_config(sections,key).decode('utf-8').replace('{{storyId}}',idstr))
+ self.story.setMetadata(key,ensure_text(self.get_config(sections,key)).replace('{{storyId}}',idstr))
#print("set:%s->%s"%(key,self.story.getMetadata(key)))
if self.has_config(sections,'chapter_urls'):
diff --git a/fanficfare/configurable.py b/fanficfare/configurable.py
index e9f722f2..329c57e5 100644
--- a/fanficfare/configurable.py
+++ b/fanficfare/configurable.py
@@ -1030,7 +1030,7 @@ class Configuration(configparser.SafeConfigParser):
if 'Accept' not in headers:
headers['Accept']="text/html,*/*"
req = Request(url,
- data=urlencode(parameters),
+ data=ensure_binary(urlencode(parameters)),
headers=headers)
## Specific UA because too many sites are blocking the default python UA.
From a97b2d347ea00f21a309e3909101dc4e6b95b266 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 1 Aug 2018 19:29:29 -0500
Subject: [PATCH 039/120] more py2/py3 fixes
---
fanficfare/dateutils.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/fanficfare/dateutils.py b/fanficfare/dateutils.py
index 3fba84e3..b8217449 100644
--- a/fanficfare/dateutils.py
+++ b/fanficfare/dateutils.py
@@ -19,6 +19,9 @@ from __future__ import absolute_import
from datetime import datetime, timedelta
+# py2 vs py3 transition
+from .six import text_type as unicode
+
import logging
logger = logging.getLogger(__name__)
From a893bdff9223ba453ac7a721de1b5fa101315f4b Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 1 Aug 2018 19:58:51 -0500
Subject: [PATCH 040/120] more py2/py3 fixes
---
fanficfare/adapters/adapter_wattpadcom.py | 7 +++++--
fanficfare/six.py | 1 -
included_dependencies/six.py | 1 -
3 files changed, 5 insertions(+), 4 deletions(-)
diff --git a/fanficfare/adapters/adapter_wattpadcom.py b/fanficfare/adapters/adapter_wattpadcom.py
index 1c5ed295..d8bf5ce7 100644
--- a/fanficfare/adapters/adapter_wattpadcom.py
+++ b/fanficfare/adapters/adapter_wattpadcom.py
@@ -20,6 +20,9 @@ import json
import logging
import re
+# py2 vs py3 transition
+from ..six import text_type as unicode
+
from .base_adapter import BaseSiteAdapter, makeDate
from .. import exceptions as exceptions
logger = logging.getLogger(__name__)
@@ -46,7 +49,7 @@ class WattpadComAdapter(BaseSiteAdapter):
try:
WattpadComAdapter.CATEGORY_DEFs = json.loads(self._fetchUrl(WattpadComAdapter.API_GETCATEGORIES))
except:
- logger.debug('API_GETCATEGORIES failed.')
+ logger.warn('API_GETCATEGORIES failed.')
WattpadComAdapter.CATEGORY_DEFs = []
@staticmethod
@@ -88,7 +91,7 @@ class WattpadComAdapter(BaseSiteAdapter):
def doExtractChapterUrlsAndMetadata(self, get_cover=True):
try:
storyInfo = json.loads(self._fetchUrl(WattpadComAdapter.API_STORYINFO % self.storyId))
- logger.debug('storyInfo: %s' % json.dumps(storyInfo))
+ # logger.debug('storyInfo: %s' % json.dumps(storyInfo))
except Exception:
raise exceptions.InvalidStoryURL(self.url, self.getSiteDomain(), self.getSiteExampleURLs())
diff --git a/fanficfare/six.py b/fanficfare/six.py
index 0691cea7..9beb025a 100644
--- a/fanficfare/six.py
+++ b/fanficfare/six.py
@@ -30,7 +30,6 @@ import types
__author__ = "Benjamin Peterson "
__version__ = "1.11.0fff" # for version included in fanficfare
-print("fff six")
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
diff --git a/included_dependencies/six.py b/included_dependencies/six.py
index 7681506e..818a7fad 100644
--- a/included_dependencies/six.py
+++ b/included_dependencies/six.py
@@ -30,7 +30,6 @@ import types
__author__ = "Benjamin Peterson "
__version__ = "1.11.0fffinclib" # for version included in fanficfare
-print("included_dependencies six")
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
From 2762c3353f40c0e38f98133f4250e199c8f70836 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Thu, 2 Aug 2018 13:08:21 -0500
Subject: [PATCH 041/120] Encoding fixes for fanfic.hu, remove print from
quotev.com
---
fanficfare/adapters/adapter_fanfichu.py | 32 ++++++++++++------------
fanficfare/adapters/adapter_quotevcom.py | 1 -
2 files changed, 16 insertions(+), 17 deletions(-)
diff --git a/fanficfare/adapters/adapter_fanfichu.py b/fanficfare/adapters/adapter_fanfichu.py
index b095402c..4d7ff372 100644
--- a/fanficfare/adapters/adapter_fanfichu.py
+++ b/fanficfare/adapters/adapter_fanfichu.py
@@ -1,4 +1,4 @@
-# coding=utf-8
+# -*- coding: utf-8 -*-
# Copyright 2014 Fanficdownloader team, 2018 FanFicFare team
#
@@ -19,7 +19,7 @@ from __future__ import absolute_import
import re
# py2 vs py3 transition
from ..six import text_type as unicode
-from ..six import ensure_binary
+from ..six import ensure_text
from ..six.moves.urllib import parse as urlparse
from ..six.moves.urllib.error import HTTPError
@@ -88,7 +88,7 @@ class FanficHuAdapter(BaseSiteAdapter):
def extractChapterUrlsAndMetadata(self):
soup = self._customized_fetch_url(self.url + '&i=1')
- if soup.title.string.encode(_SOURCE_CODE_ENCODING).strip(b' :') == 'írta':
+ if ensure_text(soup.title.string).strip(u' :') == u'írta':
raise exceptions.StoryDoesNotExist(self.url)
chapter_options = soup.find('form', action='viewstory.php').select('option')
@@ -144,46 +144,46 @@ class FanficHuAdapter(BaseSiteAdapter):
while index < len(cells):
cell = cells[index]
- key = cell.b.string.encode(_SOURCE_CODE_ENCODING).strip(b':')
+ key = ensure_text(cell.b.string).strip(u':')
try:
- value = cells[index+1].string.encode(_SOURCE_CODE_ENCODING)
- except AttributeError:
+ value = ensure_text(cells[index+1].string)
+ except:
value = None
- if key == 'Kategória':
+ if key == u'Kategória':
for anchor in cells[index+1]('a'):
self.story.addToList('category', anchor.string)
- elif key == 'Szereplõk':
+ elif key == u'Szereplõk':
if cells[index+1].string:
for name in cells[index+1].string.split(', '):
self.story.addToList('character', name)
- elif key == 'Korhatár':
+ elif key == u'Korhatár':
if value != 'nem korhatáros':
self.story.setMetadata('rating', value)
- elif key == 'Figyelmeztetések':
+ elif key == u'Figyelmeztetések':
for b_tag in cells[index+1]('b'):
self.story.addToList('warnings', b_tag.string)
- elif key == 'Jellemzõk':
+ elif key == u'Jellemzõk':
for genre in cells[index+1].string.split(', '):
self.story.addToList('genre', genre)
- elif key == 'Fejezetek':
+ elif key == u'Fejezetek':
self.story.setMetadata('numChapters', int(value))
- elif key == 'Megjelenés':
+ elif key == u'Megjelenés':
self.story.setMetadata('datePublished', makeDate(value, self.DATE_FORMAT))
- elif key == 'Frissítés':
+ elif key == u'Frissítés':
self.story.setMetadata('dateUpdated', makeDate(value, self.DATE_FORMAT))
- elif key == 'Szavak':
+ elif key == u'Szavak':
self.story.setMetadata('numWords', value)
- elif key == 'Befejezett':
+ elif key == u'Befejezett':
self.story.setMetadata('status', 'Completed' if value == 'Nem' else 'In-Progress')
index += 2
diff --git a/fanficfare/adapters/adapter_quotevcom.py b/fanficfare/adapters/adapter_quotevcom.py
index 84ddfdc2..533dd08f 100644
--- a/fanficfare/adapters/adapter_quotevcom.py
+++ b/fanficfare/adapters/adapter_quotevcom.py
@@ -72,7 +72,6 @@ class QuotevComAdapter(BaseSiteAdapter):
authdiv = soup.find('div', {'class':"quizAuthorList"})
if authdiv:
- print("div:%s"%authdiv)
for a in authdiv.find_all('a'):
self.story.addToList('author', a.get_text())
self.story.addToList('authorId', a['href'].split('/')[-1])
From f83193dd64562a536b0a4bbc333e1ffcb05ba2b8 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Thu, 2 Aug 2018 13:20:48 -0500
Subject: [PATCH 042/120] Remove more deliberate py3 incompatibilities.
---
fanficfare/adapters/adapter_wattpadcom.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/fanficfare/adapters/adapter_wattpadcom.py b/fanficfare/adapters/adapter_wattpadcom.py
index d8bf5ce7..dfe16a24 100644
--- a/fanficfare/adapters/adapter_wattpadcom.py
+++ b/fanficfare/adapters/adapter_wattpadcom.py
@@ -126,7 +126,7 @@ class WattpadComAdapter(BaseSiteAdapter):
# CATEGORIES
try:
storyCategories = [WattpadComAdapter.CATEGORY_DEFs.get(unicode(c)) for c in storyInfo['categories'] if
- WattpadComAdapter.CATEGORY_DEFs.has_key(unicode(c))]
+ unicode(c) in WattpadComAdapter.CATEGORY_DEFs]
self.story.setMetadata('category', storyCategories[0])
self.story.setMetadata('tags', storyInfo['tags'])
From eefad6628a32703afa3f7f3b29e5f5b9ad560e51 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Thu, 2 Aug 2018 14:03:34 -0500
Subject: [PATCH 043/120] Fixing mobi output for python2/3 dual version.
---
fanficfare/mobi.py | 47 ++++++++++++++++++++++++----------------------
1 file changed, 25 insertions(+), 22 deletions(-)
diff --git a/fanficfare/mobi.py b/fanficfare/mobi.py
index 24a6c41b..c61449a2 100644
--- a/fanficfare/mobi.py
+++ b/fanficfare/mobi.py
@@ -1,8 +1,10 @@
#!/usr/bin/python
+
+# -*- coding: utf-8 -*-
# Copyright(c) 2009 Andrew Chatham and Vijay Pandurangan
# Changes Copyright 2018 FanFicFare team
from __future__ import absolute_import
-
+
import struct
import time
import random
@@ -11,6 +13,7 @@ import logging
# py2 vs py3 transition
from .six import text_type as unicode
from .six import string_types as basestring
+from .six import ensure_binary
from .six import BytesIO # StringIO under py2
logger = logging.getLogger(__name__)
@@ -46,7 +49,7 @@ class _SubEntry:
def TocLink(self):
return '%.80s' % (self._name, self.title)
-
+
def Anchor(self):
return '' % self._name
@@ -94,7 +97,7 @@ class Converter:
htmltitle = html_strs[0]
entrytitle = _SubEntry(1, htmltitle)
title_html.append(entrytitle.Body())
-
+
title_html.append(PAGE_BREAK)
toc_html.append('
+Don't—e;ver—d;o—that—a;gain, 法 é
+
+horizontal rules
+
+
"Lorem ipsum dolor sit amet", consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore--et dolore magna aliqua. 'Ut enim ad minim veniam', quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+
+'''
+ soup = self.make_soup(text)
+ return self.utf8FromSoup(url,soup)
+
+def getClass():
+ return TestSiteAdapter
+
From c27c24d1b2b47bcf9b03e737f33e780840d576a3 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Mon, 6 Aug 2018 11:57:41 -0500
Subject: [PATCH 097/120] Remove accidental file.
---
fanficfare/adapters/adapter_test1.py.img | 414 -----------------------
1 file changed, 414 deletions(-)
delete mode 100644 fanficfare/adapters/adapter_test1.py.img
diff --git a/fanficfare/adapters/adapter_test1.py.img b/fanficfare/adapters/adapter_test1.py.img
deleted file mode 100644
index 999a6b87..00000000
--- a/fanficfare/adapters/adapter_test1.py.img
+++ /dev/null
@@ -1,414 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from __future__ import absolute_import
-import datetime
-import time
-import logging
-logger = logging.getLogger(__name__)
-
-from .. import exceptions
-
-# py2 vs py3 transition
-from ..six import ensure_text
-
-from .base_adapter import BaseSiteAdapter, makeDate
-
-class TestSiteAdapter(BaseSiteAdapter):
-
- def __init__(self, config, url):
- BaseSiteAdapter.__init__(self, config, url)
- self.story.setMetadata('siteabbrev','tst1')
- self.crazystring = u"tests:[bare amp(&) qt(') amp(&) gt(>) lt(<) ATnT(AT&T) L(£) Onna(女)]"
- # get storyId from url--url validation guarantees query is only sid=1234
- self.story.setMetadata('storyId',self.parsedUrl.query.split('=',)[1])
- self.username=''
- self.is_adult=False
- # happens inside BaseSiteAdapter.__init__
- # self._setURL(url)
-
- @staticmethod
- def getSiteDomain():
- return 'test1.com'
-
- @classmethod
- def getSiteExampleURLs(cls):
- return "http://"+cls.getSiteDomain()+"?sid=1234"
-
- def getSiteURLPattern(self):
- return BaseSiteAdapter.getSiteURLPattern(self)+r'/?\?sid=\d+$'
-
- def use_pagecache(self):
- return True
-
- def extractChapterUrlsAndMetadata(self):
- idstr = self.story.getMetadata('storyId')
- idnum = int(idstr)
- self.do_sleep()
-
- if idnum >= 1000:
- logger.warn("storyId:%s - Custom INI data will be used."%idstr)
-
- sections = ['teststory:%s'%idstr,'teststory:defaults']
- #print("self.get_config_list(sections,'valid_entries'):%s"%self.get_config_list(sections,'valid_entries'))
- for key in self.get_config_list(sections,'valid_entries'):
- if key.endswith("_list"):
- nkey = key[:-len("_list")]
- #print("addList:%s"%(nkey))
- for val in self.get_config_list(sections,key):
- #print("addList:%s->%s"%(nkey,val))
- self.story.addToList(nkey,ensure_text(val).replace('{{storyId}}',idstr))
- else:
- # Special cases:
- if key in ['datePublished','dateUpdated']:
- self.story.setMetadata(key,makeDate(self.get_config(sections,key),"%Y-%m-%d"))
- else:
- self.story.setMetadata(key,ensure_text(self.get_config(sections,key)).replace('{{storyId}}',idstr))
- #print("set:%s->%s"%(key,self.story.getMetadata(key)))
-
- if self.has_config(sections,'chapter_urls'):
- for l in self.get_config(sections,'chapter_urls').splitlines() :
- if l:
- self.add_chapter(l[1+l.index(','):],l[:l.index(',')])
- else:
- for (j,chap) in enumerate(self.get_config_list(sections,'chaptertitles'),start=1):
- self.add_chapter(chap,self.url+"&chapter=%d"%j)
-
- return
-
- if idnum >= 700 and idnum <= 710:
- self._setURL('http://test1.com?sid=%s'%(idnum+100))
- self.story.setMetadata('storyId',self.parsedUrl.query.split('=',)[1])
- idstr = self.story.getMetadata('storyId')
- idnum = int(idstr)
-
- if idstr == '665' and not (self.is_adult or self.getConfig("is_adult")):
- logger.warn("self.is_adult:%s"%self.is_adult)
- raise exceptions.AdultCheckRequired(self.url)
-
- if idstr == '666':
- raise exceptions.StoryDoesNotExist(self.url)
-
- if idstr.startswith('670'):
- time.sleep(1.0)
-
- if idstr.startswith('671'):
- time.sleep(1.0)
-
- if self.getConfig("username"):
- self.username = self.getConfig("username")
-
- if idstr == '668' and self.username != "Me" :
- raise exceptions.FailedToLogin(self.url,self.username)
-
- if idstr == '664':
- self.story.setMetadata(u'title',"Test Story Title "+idstr+self.crazystring+" ")
- self.story.setMetadata('author','Test Author aa bare amp(&) quote(') amp(&)')
- else:
- self.story.setMetadata(u'title',"Test Story Title "+idstr)
- self.story.setMetadata('author','Test Author aa')
- self.setDescription(self.url,u'
Description '+self.crazystring+u''' Done
-
-Some more longer description. "I suck at summaries!" "Better than it sounds!" "My first fic"
-
''')
- self.story.setMetadata('datePublished',makeDate("1975-03-15","%Y-%m-%d"))
- if idstr == '669':
- self.story.setMetadata('dateUpdated',datetime.datetime.now())
- else:
- self.story.setMetadata('dateUpdated',makeDate("1975-04-15","%Y-%m-%d"))
-
- if idstr != '674':
- self.story.setMetadata('numWords','123456')
-
- if idnum % 2 == 1:
- self.story.setMetadata('status','In-Progress')
- else:
- self.story.setMetadata('status','Completed')
-
- # greater than 10, no language or series.
- if idnum < 10:
- langs = {
- 0:"English",
- 1:"Russian",
- 2:"French",
- 3:"German",
- }
- self.story.setMetadata('language',langs[idnum%len(langs)])
- self.setSeries('The Great Test',idnum)
- self.story.setMetadata('seriesUrl','http://test1.com?seriesid=1')
- if idnum == 0:
- self.setSeries("A Nook Hyphen Test "+self.story.getMetadata('dateCreated'),idnum)
- self.story.setMetadata('seriesUrl','http://test1.com?seriesid=0')
-
- self.story.setMetadata('rating','Tweenie')
-
- if idstr == '673':
- self.story.addToList('author','Author From List 1')
- self.story.addToList('author','Author From List 2')
- self.story.addToList('author','Author From List 3')
- self.story.addToList('author','Author From List 4')
- self.story.addToList('author','Author From List 5')
- self.story.addToList('author','Author From List 6')
- self.story.addToList('author','Author From List 7')
- self.story.addToList('author','Author From List 8')
- self.story.addToList('author','Author From List 9')
- self.story.addToList('author','Author From List 0')
- self.story.addToList('author','Author From List q')
- self.story.addToList('author','Author From List w')
- self.story.addToList('author','Author From List e')
- self.story.addToList('author','Author From List r')
- self.story.addToList('author','Author From List t')
- self.story.addToList('author','Author From List y')
- self.story.addToList('author','Author From List u')
- self.story.addToList('author','Author From List i')
- self.story.addToList('author','Author From List o')
-
- self.story.addToList('authorId','98765-1')
- self.story.addToList('authorId','98765-2')
- self.story.addToList('authorId','98765-3')
- self.story.addToList('authorId','98765-4')
- self.story.addToList('authorId','98765-5')
- self.story.addToList('authorId','98765-6')
- self.story.addToList('authorId','98765-7')
- self.story.addToList('authorId','98765-8')
- self.story.addToList('authorId','98765-9')
- self.story.addToList('authorId','98765-0')
- self.story.addToList('authorId','98765-q')
- self.story.addToList('authorId','98765-w')
- self.story.addToList('authorId','98765-e')
- self.story.addToList('authorId','98765-r')
- self.story.addToList('authorId','98765-t')
- self.story.addToList('authorId','98765-y')
- self.story.addToList('authorId','98765-u')
- self.story.addToList('authorId','98765-i')
- self.story.addToList('authorId','98765-o')
-
- self.story.addToList('authorUrl','http://author/url-1')
- self.story.addToList('authorUrl','http://author/url-2')
- self.story.addToList('authorUrl','http://author/url-3')
- self.story.addToList('authorUrl','http://author/url-4')
- self.story.addToList('authorUrl','http://author/url-5')
- self.story.addToList('authorUrl','http://author/url-6')
- self.story.addToList('authorUrl','http://author/url-7')
- self.story.addToList('authorUrl','http://author/url-8')
- self.story.addToList('authorUrl','http://author/url-9')
- self.story.addToList('authorUrl','http://author/url-0')
- self.story.addToList('authorUrl','http://author/url-q')
- self.story.addToList('authorUrl','http://author/url-w')
- self.story.addToList('authorUrl','http://author/url-e')
- self.story.addToList('authorUrl','http://author/url-r')
- self.story.addToList('authorUrl','http://author/url-t')
- self.story.addToList('authorUrl','http://author/url-y')
- self.story.addToList('authorUrl','http://author/url-u')
- self.story.addToList('authorUrl','http://author/url-i')
- self.story.addToList('authorUrl','http://author/url-o')
-
- self.story.addToList('category','Power Rangers')
- self.story.addToList('category','SG-1')
- self.story.addToList('genre','Porn')
- self.story.addToList('genre','Drama')
- elif idnum < 1000:
- self.story.setMetadata('authorId','98765')
- self.story.setMetadata('authorUrl','http://author/url')
-
- self.story.addToList('warnings','Swearing')
- self.story.addToList('warnings','Violence')
-
- if idstr == '80':
- self.story.addToList('category',u'Rizzoli & Isles')
- self.story.addToList('characters','J. Rizzoli')
- elif idstr == '81':
- self.story.addToList('category',u'Pitch Perfect')
- self.story.addToList('characters','Chloe B.')
- elif idstr == '82':
- self.story.addToList('characters','Henry (Once Upon a Time)')
- self.story.addToList('category',u'Once Upon a Time (TV)')
- elif idstr == '83':
- self.story.addToList('category',u'Rizzoli & Isles')
- self.story.addToList('characters','J. Rizzoli')
- self.story.addToList('category',u'Pitch Perfect')
- self.story.addToList('characters','Chloe B.')
- self.story.addToList('ships','Chloe B. & J. Rizzoli')
- elif idstr == '90':
- self.story.setMetadata('characters','Henry (Once Upon a Time)')
- self.story.setMetadata('category',u'Once Upon a Time (TV)')
- else:
- self.story.addToList('category','Harry Potter')
- self.story.addToList('category','Furbie')
- self.story.addToList('category','Crossover')
- self.story.addToList('category',u'Puella Magi Madoka Magica/魔法少女まどか★マギカ')
- self.story.addToList('category',u'Magical Girl Lyrical Nanoha')
- self.story.addToList('category',u'Once Upon a Time (TV)')
- self.story.addToList('characters','Bob Smith')
- self.story.addToList('characters','George Johnson')
- self.story.addToList('characters','Fred Smythe')
- self.story.addToList('ships','Harry Potter/Ginny Weasley')
- self.story.addToList('ships','Harry Potter/Ginny Weasley/Albus Dumbledore')
- self.story.addToList('ships','Harry Potter & Hermione Granger')
-
- self.story.addToList('genre','Fantasy')
- self.story.addToList('genre','Comedy')
- self.story.addToList('genre','Sci-Fi')
- self.story.addToList('genre','Noir')
-
- self.story.addToList('listX','xVal1')
- self.story.addToList('listX','xVal2')
- self.story.addToList('listX','xVal3')
- self.story.addToList('listX','xVal4')
-
- self.story.addToList('listY','yVal1')
- self.story.addToList('listY','yVal2')
- self.story.addToList('listY','yVal3')
- self.story.addToList('listY','yVal4')
-
- self.story.addToList('listZ','zVal1')
- self.story.addToList('listZ','zVal2')
- self.story.addToList('listZ','zVal3')
- self.story.addToList('listZ','zVal4')
-
- self.story.setMetadata('metaA','98765')
- self.story.setMetadata('metaB','01245')
- self.story.setMetadata('metaC','The mighty metaC!')
-
- chapters = [(u'Prologue '+self.crazystring,self.url+"&chapter=1"),
- ('Chapter 1, Xenos on Cinnabar',self.url+"&chapter=2"),
- ('Chapter 2, Sinmay on Kintikin',self.url+"&chapter=3"),
- ('Chapter 3, Over Cinnabar',self.url+"&chapter=4"),
- ('Chapter 4',self.url+"&chapter=5"),
- ('Chapter 5',self.url+"&chapter=6"),
- ('Chapter 6',self.url+"&chapter=7"),
- ('Chapter 7',self.url+"&chapter=8"),
- ('Chapter 8',self.url+"&chapter=9"),
- #('Chapter 9',self.url+"&chapter=0"),
- #('Chapter 0',self.url+"&chapter=a"),
- #('Chapter a',self.url+"&chapter=b"),
- #('Chapter b',self.url+"&chapter=c"),
- #('Chapter c',self.url+"&chapter=d"),
- #('Chapter d',self.url+"&chapter=e"),
- #('Chapter e',self.url+"&chapter=f"),
- #('Chapter f',self.url+"&chapter=g"),
- #('Chapter g',self.url+"&chapter=h"),
- #('Chapter h',self.url+"&chapter=i"),
- #('Chapter i',self.url+"&chapter=j"),
- #('Chapter j',self.url+"&chapter=k"),
- #('Chapter k',self.url+"&chapter=l"),
- #('Chapter l',self.url+"&chapter=m"),
- #('Chapter m',self.url+"&chapter=n"),
- #('Chapter n',self.url+"&chapter=o"),
- ]
- for c in chapters:
- self.add_chapter(c[0],c[1],{'test':'asdf'})
-
-
- def getChapterText(self, url):
- logger.debug('Getting chapter text from: %s' % url)
- self.do_sleep()
- if self.story.getMetadata('storyId').startswith('670') or \
- self.story.getMetadata('storyId').startswith('672'):
- time.sleep(1.0)
-
- if "chapter=1" in url :
- text=u'''
-
-
Prologue
-
This is a fake adapter for testing purposes. Different sid's will give different errors:
-
sid>=1000 will use custom test story data from your configuration(personal.ini)
Lorem '''+self.crazystring+u''' italics, bold, underline consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
-Don't—e;ver—d;o—that—a;gain, 法 é
-
-horizontal rules
-
-
"Lorem ipsum dolor sit amet", consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore--et dolore magna aliqua. 'Ut enim ad minim veniam', quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
-
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
-
-'''
- soup = self.make_soup(text)
- return self.utf8FromSoup(url,soup)
-
-def getClass():
- return TestSiteAdapter
-
From 95124c0638f61e8f528a1357144ca83900b62391 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Tue, 7 Aug 2018 11:57:10 -0500
Subject: [PATCH 098/120] Require Python 2.7 or newer in CLI.
Dependencies (html5lib, etc.) no longer work on 2.6.
---
fanficfare/cli.py | 17 ++++++++++-------
setup.py | 23 +++++++++++++----------
2 files changed, 23 insertions(+), 17 deletions(-)
diff --git a/fanficfare/cli.py b/fanficfare/cli.py
index 1d0f0d26..7273cfa7 100644
--- a/fanficfare/cli.py
+++ b/fanficfare/cli.py
@@ -28,9 +28,8 @@ import string
import os, sys
import pickle
-if sys.version_info < (2, 5):
- print('This program requires Python 2.5 or newer.')
- sys.exit(1)
+if sys.version_info < (2, 7):
+ sys.exit('This program requires Python 2.7 or newer.')
elif sys.version_info < (3, 0):
reload(sys) # Reload restores 'hidden' setdefaultencoding method
sys.setdefaultencoding("utf-8")
@@ -84,7 +83,6 @@ def main(argv=None,
parser=None,
passed_defaultsini=None,
passed_personalini=None):
- logger.debug("Python Version:%s"%sys.version)
if argv is None:
argv = sys.argv[1:]
# read in args, anything starting with -- will be treated as --=
@@ -173,13 +171,18 @@ def main(argv=None,
options, args = parser.parse_args(argv)
+ if not options.debug:
+ logger.setLevel(logging.WARNING)
+ else:
+ import platform
+ logger.debug(" OS Version:%s"%platform.platform())
+ logger.debug("Python Version:%s"%sys.version)
+ logger.debug(" FFF Version:%s"%version)
+
if options.version:
print("Version: %s" % version)
return
- if not options.debug:
- logger.setLevel(logging.WARNING)
-
list_only = any((options.imaplist,
options.siteslist,
options.list,
diff --git a/setup.py b/setup.py
index 897970ec..d6be030b 100644
--- a/setup.py
+++ b/setup.py
@@ -13,12 +13,18 @@ from setuptools import setup, find_packages
import codecs
from os import path
+package_name="FanFicFare"
+
+import sys
+if sys.version_info < (2,7):
+ sys.exit(package_name+' requires Python 2.7 or newer.')
+
# Get the long description from the relevant file
with codecs.open('DESCRIPTION.rst', encoding='utf-8') as f:
long_description = f.read()
-
+
setup(
- name="FanFicFare",
+ name=package_name,
# Versions should comply with PEP440.
version="2.37.0",
@@ -42,7 +48,7 @@ setup(
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
- 'Development Status :: 5 - Production/Stable',
+ 'Development Status :: 3 - Alpha',
'Environment :: Console',
@@ -55,13 +61,9 @@ setup(
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
- # 'Programming Language :: Python :: 2',
- 'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
- # 'Programming Language :: Python :: 3',
- # 'Programming Language :: Python :: 3.2',
- # 'Programming Language :: Python :: 3.3',
- # 'Programming Language :: Python :: 3.4',
+ # Earlier py3 version may work, but I've not tested them.
+ 'Programming Language :: Python :: 3.7',
],
# What does your project relate to?
@@ -79,7 +81,8 @@ setup(
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
- install_requires=['beautifulsoup4','chardet','html5lib','html2text'], # html5lib requires 'six'.
+ install_requires=['beautifulsoup4','chardet','html5lib','html2text'],
+ # html5lib requires 'six'; FFF includes its own copy as fanficfare.six
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
From 56fbe15dc9cc7843eee68b2665a5a1afff7388e2 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Tue, 7 Aug 2018 11:59:46 -0500
Subject: [PATCH 099/120] Bump Test Version 2.37.1
---
calibre-plugin/__init__.py | 2 +-
fanficfare/cli.py | 2 +-
setup.py | 2 +-
webservice/app.yaml | 2 +-
webservice/index.html | 2 +-
5 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/calibre-plugin/__init__.py b/calibre-plugin/__init__.py
index 66da2095..1d96b215 100644
--- a/calibre-plugin/__init__.py
+++ b/calibre-plugin/__init__.py
@@ -33,7 +33,7 @@ except NameError:
from calibre.customize import InterfaceActionBase
# pulled out from FanFicFareBase for saving in prefs.py
-__version__ = (2, 37, 0)
+__version__ = (2, 37, 1)
## Apparently the name for this class doesn't matter--it was still
## 'demo' for the first few versions.
diff --git a/fanficfare/cli.py b/fanficfare/cli.py
index 7273cfa7..f867216a 100644
--- a/fanficfare/cli.py
+++ b/fanficfare/cli.py
@@ -39,7 +39,7 @@ else: # > 3.0
def pickle_load(f):
return pickle.load(f,encoding="bytes")
-version="2.37.0"
+version="2.37.1"
os.environ['CURRENT_VERSION_ID']=version
if sys.version_info >= (2, 7):
diff --git a/setup.py b/setup.py
index d6be030b..c53198bf 100644
--- a/setup.py
+++ b/setup.py
@@ -27,7 +27,7 @@ setup(
name=package_name,
# Versions should comply with PEP440.
- version="2.37.0",
+ version="2.37.1",
description='A tool for downloading fanfiction to eBook formats',
long_description=long_description,
diff --git a/webservice/app.yaml b/webservice/app.yaml
index 10b29ede..8da6b2f0 100644
--- a/webservice/app.yaml
+++ b/webservice/app.yaml
@@ -1,6 +1,6 @@
# ffd-retief-hrd fanficfare
application: fanficfare
-version: 2-37-0
+version: 2-37-1
runtime: python27
api_version: 1
threadsafe: true
diff --git a/webservice/index.html b/webservice/index.html
index ddb24f49..7f42dcf8 100644
--- a/webservice/index.html
+++ b/webservice/index.html
@@ -31,7 +31,7 @@
If you have any problems with this application, please
report them in
the FanFicFare Google Group. The
- previous version
+ previous version
is also available for you to use if necessary.
From c386df4e486ca9e919c3c0056420c8f3c1cc9504 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Tue, 7 Aug 2018 12:36:31 -0500
Subject: [PATCH 100/120] Correction for fanfiktion.de metadata parsing
---
fanficfare/adapters/adapter_fanfiktionde.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/fanficfare/adapters/adapter_fanfiktionde.py b/fanficfare/adapters/adapter_fanfiktionde.py
index 1f47f842..ef50a9aa 100644
--- a/fanficfare/adapters/adapter_fanfiktionde.py
+++ b/fanficfare/adapters/adapter_fanfiktionde.py
@@ -166,7 +166,7 @@ class FanFiktionDeAdapter(BaseSiteAdapter):
self.story.extendList('genre',genres[:genres.index(' / ')].split(', '))
self.story.setMetadata('rating', genres[genres.index(' / ')+3:])
- self.story.addToList('category',stripHTML(soup.find('span',id='ffcbox-story-topic-1')).split(' / ')[2])
+ self.story.addToList('category',stripHTML(soup.find('span',id='ffcbox-story-topic-1')).split('/')[2].strip())
try:
self.story.setMetadata('native_status', head.find_all('span',{'class':'titled-icon'})[3]['title'])
From 2779e159613fe067b714fd58b9972c4168b38660 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Tue, 7 Aug 2018 14:46:36 -0500
Subject: [PATCH 101/120] Fix for &<> entities in chapter titles.
---
fanficfare/adapters/base_adapter.py | 4 +--
fanficfare/htmlcleanup.py | 50 ++++++++++++++++-------------
fanficfare/story.py | 3 +-
3 files changed, 31 insertions(+), 26 deletions(-)
diff --git a/fanficfare/adapters/base_adapter.py b/fanficfare/adapters/base_adapter.py
index 39ada944..6a0a9de6 100644
--- a/fanficfare/adapters/base_adapter.py
+++ b/fanficfare/adapters/base_adapter.py
@@ -39,7 +39,7 @@ logger = logging.getLogger(__name__)
from ..story import Story
from ..configurable import Configurable
-from ..htmlcleanup import removeEntities, removeAllEntities, stripHTML
+from ..htmlcleanup import stripHTML
from ..exceptions import InvalidStoryURL
# quick convenience class
@@ -156,7 +156,7 @@ class BaseSiteAdapter(Configurable):
self.ignore_chapter_url_list = [ self.normalize_chapterurl(u) for u in self.getConfig('ignore_chapter_url_list').splitlines() ]
if self.normalize_chapterurl(url) not in self.ignore_chapter_url_list:
meta = defaultdict(unicode,othermeta) # copy othermeta
- meta.update({'title':stripHTML(title),'url':url}) # after other to make sure they are set
+ meta.update({'title':stripHTML(title,remove_all_entities=False),'url':url}) # after other to make sure they are set
self.chapterUrls.append(meta)
self.story.setMetadata('numChapters', self.num_chapters())
return True
diff --git a/fanficfare/htmlcleanup.py b/fanficfare/htmlcleanup.py
index b15dbf3b..6250350d 100644
--- a/fanficfare/htmlcleanup.py
+++ b/fanficfare/htmlcleanup.py
@@ -62,9 +62,10 @@ def _replaceNotEntities(data):
p = re.compile(r'&([a-zA-Z][-.a-zA-Z0-9]*);')
return p.sub(r'&\1', data)
-def stripHTML(soup):
+def stripHTML(soup, remove_all_entities=True):
if isinstance(soup,basestring):
- retval = removeAllEntities(re.sub(r'<[^>]+>','',"%s" % soup)).strip()
+ retval = removeEntities(re.sub(r'<[^>]+>','',"%s" % soup),
+ remove_all_entities=remove_all_entities).strip()
else:
# bs4 already converts all the entities to UTF8 chars.
retval = soup.get_text(strip=True)
@@ -77,48 +78,48 @@ def conditionalRemoveEntities(value):
return removeEntities(value).strip()
else:
return value
-
-def removeAllEntities(text):
- # Remove < < and &
- return removeEntities(text).replace('<', '<').replace('>', '>').replace('&', '&')
-def removeEntities(text, space_only=False):
+def removeAllEntities(text):
+ # Remove < < and & also
+ return removeEntities(text, remove_all_entities=True)
+
+def removeEntities(text, space_only=False, remove_all_entities=False):
+ # keeps &, < and > when remove_all_entities=False
if text is None:
return u""
-
+
if not isinstance(text,basestring):
- return unicode(text)
-
+ text = unicode(text)
+
try:
t = text
except (UnicodeEncodeError,UnicodeDecodeError) as e:
try:
- t = text.encode ('ascii', 'xmlcharrefreplace')
+ t = text.encode ('ascii', 'xmlcharrefreplace')
except (UnicodeEncodeError,UnicodeDecodeError) as e:
t = text
- text = t
+ text = t
# replace numeric versions of [&<>] with named versions,
# then replace named versions with actual characters,
text = re.sub(r'*38;','&',text)
text = re.sub(r'*60;','<',text)
text = re.sub(r'*62;','>',text)
-
+
# replace remaining entities with unicode value, such as ' -> '
text = _replaceNumberEntities(text)
# replace several named entities with character, such as — -> -
- # see constants.py for the list.
# reverse sort will put entities with ; before the same one without, when valid.
for e in reversed(sorted(entities.keys())):
v = entities[e]
if space_only and re.match(r"^[^\s]$", v, re.UNICODE | re.S):
# if not space
continue
- try:
- text = text.replace(e, v)
- except UnicodeDecodeError as ex:
- # for the pound symbol in constants.py
- text = text.replace(e, v.decode('utf-8'))
+ # try:
+ text = text.replace(e, v)
+ # except UnicodeDecodeError as ex:
+ # # for the pound symbol
+ # text = text.replace(e, v.decode('utf-8'))
# SGMLParser, and in turn, BeautifulStoneSoup doesn't parse
# entities terribly well and inserts (;) after something that
@@ -128,9 +129,14 @@ def removeEntities(text, space_only=False):
# this point, there should be *no* real entities left, so find
# these not-entities and removing them here should be safe.
text = _replaceNotEntities(text)
-
- # < < and & are the only html entities allowed in xhtml, put those back.
- return text.replace('&', '&').replace('<', '<').replace('>', '>')
+
+ if remove_all_entities:
+ text = text.replace('<', '<').replace('>', '>').replace('&', '&')
+ else:
+ # < > and & are the only html entities allowed in xhtml, put those back.
+ # They come out as < because _replaceNotEntities removes the ';'.
+ text = text.replace('&', '&').replace('<', '<').replace('>', '>')
+ return text
## Currently used(optionally) by adapter_lightnovelgatecom and
## adapter_wwwnovelallcom only. I hesitate to put the option in
diff --git a/fanficfare/story.py b/fanficfare/story.py
index c3beab41..2dc6c33b 100644
--- a/fanficfare/story.py
+++ b/fanficfare/story.py
@@ -1021,7 +1021,6 @@ class Story(Configurable):
def addChapter(self, chap, newchap=False):
# logger.debug("addChapter(%s,%s)"%(chap,newchap))
chapter = defaultdict(unicode,chap) # default unknown to empty string
- chapter['title'] = removeEntities(chapter['title'])
chapter['html'] = removeEntities(chapter['html'])
if self.getConfig('strip_chapter_numbers') and \
self.getConfig('chapter_title_strip_pattern'):
@@ -1039,7 +1038,7 @@ class Story(Configurable):
self.chapters.append(chapter)
def getChapters(self,fortoc=False):
- "Chapters will be dicts"
+ "Chapters will be defaultdicts(unicode)"
retval = []
## only add numbers if more than one chapter. Ditto (new) marks.
From a999544859e54012ad45fe0ad7d550ebaec2821d Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Tue, 7 Aug 2018 17:01:17 -0500
Subject: [PATCH 102/120] royalroadl.com now wants to be www.royalroad.com.
---
fanficfare/adapters/adapter_royalroadl.py | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/fanficfare/adapters/adapter_royalroadl.py b/fanficfare/adapters/adapter_royalroadl.py
index 959f6419..d2451b0f 100644
--- a/fanficfare/adapters/adapter_royalroadl.py
+++ b/fanficfare/adapters/adapter_royalroadl.py
@@ -93,18 +93,19 @@ class RoyalRoadAdapter(BaseSiteAdapter):
@staticmethod # must be @staticmethod, don't remove it.
def getSiteDomain():
# The site domain. Does have www here, if it uses it.
- return 'royalroadl.com'
+ # changed from royalroadl.com
+ return 'www.royalroad.com'
@classmethod
def getAcceptDomains(cls):
- return ['royalroadl.com','www.royalroadl.com']
+ return ['royalroad.com','royalroadl.com','www.royalroadl.com']
@classmethod
def getSiteExampleURLs(cls):
- return "https://royalroadl.com/fiction/3056"
+ return "https://www.royalroad.com/fiction/3056"
def getSiteURLPattern(self):
- return "https?"+re.escape("://")+r"(www\.|)royalroadl\.com/fiction/\d+(/.*)?$"
+ return "https?"+re.escape("://")+r"(www\.|)royalroadl?\.com/fiction/\d+(/.*)?$"
def use_pagecache(self):
'''
From 5be511916b1faf4e7ee5b65d1e77345793960845 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 8 Aug 2018 10:29:39 -0500
Subject: [PATCH 103/120] Web service needs that UnicodeDecodeError exception
handler
---
fanficfare/htmlcleanup.py | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/fanficfare/htmlcleanup.py b/fanficfare/htmlcleanup.py
index 6250350d..fb0258a0 100644
--- a/fanficfare/htmlcleanup.py
+++ b/fanficfare/htmlcleanup.py
@@ -115,11 +115,11 @@ def removeEntities(text, space_only=False, remove_all_entities=False):
if space_only and re.match(r"^[^\s]$", v, re.UNICODE | re.S):
# if not space
continue
- # try:
- text = text.replace(e, v)
- # except UnicodeDecodeError as ex:
- # # for the pound symbol
- # text = text.replace(e, v.decode('utf-8'))
+ try:
+ text = text.replace(e, v)
+ except UnicodeDecodeError as ex:
+ # for the pound symbol
+ text = text.replace(e, v.decode('utf-8'))
# SGMLParser, and in turn, BeautifulStoneSoup doesn't parse
# entities terribly well and inserts (;) after something that
From 67698baf117914fee083903e4e0a4cd08572707f Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 8 Aug 2018 10:05:48 -0500
Subject: [PATCH 104/120] Update included_dependencies to beautifulsoup4-4.6.1
---
included_dependencies/bs4/__init__.py | 91 ++++++--
included_dependencies/bs4/builder/__init__.py | 15 +-
.../bs4/builder/_htmlparser.py | 102 ++++++++-
included_dependencies/bs4/builder/_lxml.py | 18 +-
included_dependencies/bs4/dammit.py | 6 +-
included_dependencies/bs4/diagnose.py | 20 +-
included_dependencies/bs4/element.py | 216 ++++++++++++++----
7 files changed, 374 insertions(+), 94 deletions(-)
diff --git a/included_dependencies/bs4/__init__.py b/included_dependencies/bs4/__init__.py
index 46caac04..ac3c1720 100644
--- a/included_dependencies/bs4/__init__.py
+++ b/included_dependencies/bs4/__init__.py
@@ -21,14 +21,15 @@ http://www.crummy.com/software/BeautifulSoup/bs4/doc/
# found in the LICENSE file.
__author__ = "Leonard Richardson (leonardr@segfault.org)"
-__version__ = "4.5.3"
-__copyright__ = "Copyright (c) 2004-2017 Leonard Richardson"
+__version__ = "4.6.1"
+__copyright__ = "Copyright (c) 2004-2018 Leonard Richardson"
__license__ = "MIT"
__all__ = ['BeautifulSoup']
import os
import re
+import sys
import traceback
import warnings
@@ -82,14 +83,46 @@ class BeautifulSoup(Tag):
ASCII_SPACES = '\x20\x0a\x09\x0c\x0d'
- NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nThe code that caused this warning is on line %(line_number)s of the file %(filename)s. To get rid of this warning, change code that looks like this:\n\n BeautifulSoup([your markup])\n\nto this:\n\n BeautifulSoup([your markup], \"%(parser)s\")\n"
+ NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nThe code that caused this warning is on line %(line_number)s of the file %(filename)s. To get rid of this warning, pass the additional argument 'features=\"%(parser)s\"' to the BeautifulSoup constructor.\n"
def __init__(self, markup="", features=None, builder=None,
parse_only=None, from_encoding=None, exclude_encodings=None,
**kwargs):
- """The Soup object is initialized as the 'root tag', and the
- provided markup (which can be a string or a file-like object)
- is fed into the underlying parser."""
+ """Constructor.
+
+ :param markup: A string or a file-like object representing
+ markup to be parsed.
+
+ :param features: Desirable features of the parser to be used. This
+ may be the name of a specific parser ("lxml", "lxml-xml",
+ "html.parser", or "html5lib") or it may be the type of markup
+ to be used ("html", "html5", "xml"). It's recommended that you
+ name a specific parser, so that Beautiful Soup gives you the
+ same results across platforms and virtual environments.
+
+ :param builder: A specific TreeBuilder to use instead of looking one
+ up based on `features`. You shouldn't need to use this.
+
+ :param parse_only: A SoupStrainer. Only parts of the document
+ matching the SoupStrainer will be considered. This is useful
+ when parsing part of a document that would otherwise be too
+ large to fit into memory.
+
+ :param from_encoding: A string indicating the encoding of the
+ document to be parsed. Pass this in if Beautiful Soup is
+ guessing wrongly about the document's encoding.
+
+ :param exclude_encodings: A list of strings indicating
+ encodings known to be wrong. Pass this in if you don't know
+ the document's encoding but you know Beautiful Soup's guess is
+ wrong.
+
+ :param kwargs: For backwards compatibility purposes, the
+ constructor accepts certain keyword arguments used in
+ Beautiful Soup 3. None of these arguments do anything in
+ Beautiful Soup 4 and there's no need to actually pass keyword
+ arguments into the constructor.
+ """
if 'convertEntities' in kwargs:
warnings.warn(
@@ -171,14 +204,35 @@ class BeautifulSoup(Tag):
else:
markup_type = "HTML"
- caller = traceback.extract_stack()[0]
- filename = caller[0]
- line_number = caller[1]
- warnings.warn(self.NO_PARSER_SPECIFIED_WARNING % dict(
- filename=filename,
- line_number=line_number,
- parser=builder.NAME,
- markup_type=markup_type))
+ # This code adapted from warnings.py so that we get the same line
+ # of code as our warnings.warn() call gets, even if the answer is wrong
+ # (as it may be in a multithreading situation).
+ caller = None
+ try:
+ caller = sys._getframe(1)
+ except ValueError:
+ pass
+ if caller:
+ globals = caller.f_globals
+ line_number = caller.f_lineno
+ else:
+ globals = sys.__dict__
+ line_number= 1
+ filename = globals.get('__file__')
+ if filename:
+ fnl = filename.lower()
+ if fnl.endswith((".pyc", ".pyo")):
+ filename = filename[:-1]
+ if filename:
+ # If there is no filename at all, the user is most likely in a REPL,
+ # and the warning is not necessary.
+ values = dict(
+ filename=filename,
+ line_number=line_number,
+ parser=builder.NAME,
+ markup_type=markup_type
+ )
+ warnings.warn(self.NO_PARSER_SPECIFIED_WARNING % values, stacklevel=2)
self.builder = builder
self.is_xml = builder.is_xml
@@ -215,8 +269,8 @@ class BeautifulSoup(Tag):
markup = markup.encode("utf8")
warnings.warn(
'"%s" looks like a filename, not markup. You should'
- 'probably open this file and pass the filehandle into'
- 'Beautiful Soup.' % markup)
+ ' probably open this file and pass the filehandle into'
+ ' Beautiful Soup.' % markup)
self._check_markup_is_url(markup)
for (self.markup, self.original_encoding, self.declared_html_encoding,
@@ -302,9 +356,10 @@ class BeautifulSoup(Tag):
self.preserve_whitespace_tag_stack = []
self.pushTag(self)
- def new_tag(self, name, namespace=None, nsprefix=None, **attrs):
+ def new_tag(self, name, namespace=None, nsprefix=None, attrs={}, **kwattrs):
"""Create a new tag associated with this soup."""
- return Tag(None, self.builder, name, namespace, nsprefix, attrs)
+ kwattrs.update(attrs)
+ return Tag(None, self.builder, name, namespace, nsprefix, kwattrs)
def new_string(self, s, subclass=NavigableString):
"""Create a new NavigableString associated with this soup."""
diff --git a/included_dependencies/bs4/builder/__init__.py b/included_dependencies/bs4/builder/__init__.py
index 601979bf..21454e6f 100644
--- a/included_dependencies/bs4/builder/__init__.py
+++ b/included_dependencies/bs4/builder/__init__.py
@@ -93,7 +93,7 @@ class TreeBuilder(object):
preserve_whitespace_tags = set()
empty_element_tags = None # A tag will be considered an empty-element
# tag when and only when it has no contents.
-
+
# A value for these tag/attribute combinations is a space- or
# comma-separated list of CDATA, rather than a single CDATA.
cdata_list_attributes = {}
@@ -125,7 +125,7 @@ class TreeBuilder(object):
if self.empty_element_tags is None:
return True
return tag_name in self.empty_element_tags
-
+
def feed(self, markup):
raise NotImplementedError()
@@ -232,9 +232,14 @@ class HTMLTreeBuilder(TreeBuilder):
"""
preserve_whitespace_tags = HTMLAwareEntitySubstitution.preserve_whitespace_tags
- empty_element_tags = set(['br' , 'hr', 'input', 'img', 'meta',
- 'spacer', 'link', 'frame', 'base'])
-
+ empty_element_tags = set([
+ # These are from HTML5.
+ 'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen', 'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr',
+
+ # These are from earlier versions of HTML and are removed in HTML5.
+ 'basefont', 'bgsound', 'command', 'frame', 'image', 'isindex', 'nextid', 'spacer'
+ ])
+
# The HTML standard defines these attributes as containing a
# space-separated list of values, not a single value. That is,
# class="foo bar" means that the 'class' attribute has two values,
diff --git a/included_dependencies/bs4/builder/_htmlparser.py b/included_dependencies/bs4/builder/_htmlparser.py
index 823ca15a..ee6c685d 100644
--- a/included_dependencies/bs4/builder/_htmlparser.py
+++ b/included_dependencies/bs4/builder/_htmlparser.py
@@ -1,3 +1,4 @@
+# encoding: utf-8
"""Use the HTMLParser library to parse HTML files that aren't too bad."""
# Use of this source code is governed by a BSD-style license that can be
@@ -52,7 +53,42 @@ from bs4.builder import (
HTMLPARSER = 'html.parser'
class BeautifulSoupHTMLParser(HTMLParser):
- def handle_starttag(self, name, attrs):
+
+ def __init__(self, *args, **kwargs):
+ HTMLParser.__init__(self, *args, **kwargs)
+
+ # Keep a list of empty-element tags that were encountered
+ # without an explicit closing tag. If we encounter a closing tag
+ # of this type, we'll associate it with one of those entries.
+ #
+ # This isn't a stack because we don't care about the
+ # order. It's a list of closing tags we've already handled and
+ # will ignore, assuming they ever show up.
+ self.already_closed_empty_element = []
+
+ def error(self, msg):
+ """In Python 3, HTMLParser subclasses must implement error(), although this
+ requirement doesn't appear to be documented.
+
+ In Python 2, HTMLParser implements error() as raising an exception.
+
+ In any event, this method is called only on very strange markup and our best strategy
+ is to pretend it didn't happen and keep going.
+ """
+ warnings.warn(msg)
+
+ def handle_startendtag(self, name, attrs):
+ # This is only called when the markup looks like
+ # .
+
+ # is_startend() tells handle_starttag not to close the tag
+ # just because its name matches a known empty-element tag. We
+ # know that this is an empty-element tag and we want to call
+ # handle_endtag ourselves.
+ tag = self.handle_starttag(name, attrs, handle_empty_element=False)
+ self.handle_endtag(name)
+
+ def handle_starttag(self, name, attrs, handle_empty_element=True):
# XXX namespace
attr_dict = {}
for key, value in attrs:
@@ -62,10 +98,34 @@ class BeautifulSoupHTMLParser(HTMLParser):
value = ''
attr_dict[key] = value
attrvalue = '""'
- self.soup.handle_starttag(name, None, None, attr_dict)
+ #print "START", name
+ tag = self.soup.handle_starttag(name, None, None, attr_dict)
+ if tag and tag.is_empty_element and handle_empty_element:
+ # Unlike other parsers, html.parser doesn't send separate end tag
+ # events for empty-element tags. (It's handled in
+ # handle_startendtag, but only if the original markup looked like
+ # .)
+ #
+ # So we need to call handle_endtag() ourselves. Since we
+ # know the start event is identical to the end event, we
+ # don't want handle_endtag() to cross off any previous end
+ # events for tags of this name.
+ self.handle_endtag(name, check_already_closed=False)
- def handle_endtag(self, name):
- self.soup.handle_endtag(name)
+ # But we might encounter an explicit closing tag for this tag
+ # later on. If so, we want to ignore it.
+ self.already_closed_empty_element.append(name)
+
+ def handle_endtag(self, name, check_already_closed=True):
+ #print "END", name
+ if check_already_closed and name in self.already_closed_empty_element:
+ # This is a redundant end tag for an empty-element tag.
+ # We've already called handle_endtag() for it, so just
+ # check it off the list.
+ # print "ALREADY CLOSED", name
+ self.already_closed_empty_element.remove(name)
+ else:
+ self.soup.handle_endtag(name)
def handle_data(self, data):
self.soup.handle_data(data)
@@ -81,11 +141,26 @@ class BeautifulSoupHTMLParser(HTMLParser):
else:
real_name = int(name)
- try:
- data = unichr(real_name)
- except (ValueError, OverflowError), e:
- data = u"\N{REPLACEMENT CHARACTER}"
-
+ data = None
+ if real_name < 256:
+ # HTML numeric entities are supposed to reference Unicode
+ # code points, but sometimes they reference code points in
+ # some other encoding (ahem, Windows-1252). E.g.
+ # instead of É for LEFT DOUBLE QUOTATION MARK. This
+ # code tries to detect this situation and compensate.
+ for encoding in (self.soup.original_encoding, 'windows-1252'):
+ if not encoding:
+ continue
+ try:
+ data = bytearray([real_name]).decode(encoding)
+ except UnicodeDecodeError, e:
+ pass
+ if not data:
+ try:
+ data = unichr(real_name)
+ except (ValueError, OverflowError), e:
+ pass
+ data = data or u"\N{REPLACEMENT CHARACTER}"
self.handle_data(data)
def handle_entityref(self, name):
@@ -93,7 +168,12 @@ class BeautifulSoupHTMLParser(HTMLParser):
if character is not None:
data = character
else:
- data = "&%s;" % name
+ # If this were XML, it would be ambiguous whether "&foo"
+ # was an character entity reference with a missing
+ # semicolon or the literal string "&foo". Since this is
+ # HTML, we have a complete list of all character entity references,
+ # and this one wasn't found, so assume it's the literal string "&foo".
+ data = "&%s" % name
self.handle_data(data)
def handle_comment(self, data):
@@ -165,10 +245,12 @@ class HTMLParserTreeBuilder(HTMLTreeBuilder):
parser.soup = self.soup
try:
parser.feed(markup)
+ parser.close()
except HTMLParseError, e:
warnings.warn(RuntimeWarning(
"Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help."))
raise e
+ parser.already_closed_empty_element = []
# Patch 3.2 versions of HTMLParser earlier than 3.2.3 to use some
# 3.2.3 code. This ensures they don't treat markup like as a
diff --git a/included_dependencies/bs4/builder/_lxml.py b/included_dependencies/bs4/builder/_lxml.py
index d2ca2872..4a0f7de4 100644
--- a/included_dependencies/bs4/builder/_lxml.py
+++ b/included_dependencies/bs4/builder/_lxml.py
@@ -5,9 +5,13 @@ __all__ = [
'LXMLTreeBuilder',
]
+try:
+ from collections.abc import Callable # Python 3.6
+except ImportError , e:
+ from collections import Callable
+
from io import BytesIO
from StringIO import StringIO
-import collections
from lxml import etree
from bs4.element import (
Comment,
@@ -58,7 +62,7 @@ class LXMLTreeBuilderForXML(TreeBuilder):
# Use the default parser.
parser = self.default_parser(encoding)
- if isinstance(parser, collections.Callable):
+ if isinstance(parser, Callable):
# Instantiate the parser with default arguments
parser = parser(target=self, strip_cdata=False, encoding=encoding)
return parser
@@ -147,11 +151,11 @@ class LXMLTreeBuilderForXML(TreeBuilder):
attrs = dict(attrs)
nsprefix = None
# Invert each namespace map as it comes in.
- if len(self.nsmaps) > 1:
- # There are no new namespaces for this tag, but
- # non-default namespaces are in play, so we need a
- # separate tag stack to know when they end.
- self.nsmaps.append(None)
+ if len(nsmap) == 0 and len(self.nsmaps) > 1:
+ # There are no new namespaces for this tag, but
+ # non-default namespaces are in play, so we need a
+ # separate tag stack to know when they end.
+ self.nsmaps.append(None)
elif len(nsmap) > 0:
# A new namespace mapping has come into play.
inverted_nsmap = dict((value, key) for key, value in nsmap.items())
diff --git a/included_dependencies/bs4/dammit.py b/included_dependencies/bs4/dammit.py
index 7965565f..be46b394 100644
--- a/included_dependencies/bs4/dammit.py
+++ b/included_dependencies/bs4/dammit.py
@@ -46,9 +46,9 @@ except ImportError:
pass
xml_encoding_re = re.compile(
- '^<\?.*encoding=[\'"](.*?)[\'"].*\?>'.encode(), re.I)
+ '^<\\?.*encoding=[\'"](.*?)[\'"].*\\?>'.encode(), re.I)
html_meta_re = re.compile(
- '<\s*meta[^>]+charset\s*=\s*["\']?([^>]*?)[ /;\'">]'.encode(), re.I)
+ '<\\s*meta[^>]+charset\\s*=\\s*["\']?([^>]*?)[ /;\'">]'.encode(), re.I)
class EntitySubstitution(object):
@@ -82,7 +82,7 @@ class EntitySubstitution(object):
}
BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
- "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
+ "&(?!#\\d+;|#x[0-9a-fA-F]+;|\\w+;)"
")")
AMPERSAND_OR_BRACKET = re.compile("([<>&])")
diff --git a/included_dependencies/bs4/diagnose.py b/included_dependencies/bs4/diagnose.py
index 8768332f..7a28c09a 100644
--- a/included_dependencies/bs4/diagnose.py
+++ b/included_dependencies/bs4/diagnose.py
@@ -37,7 +37,7 @@ def diagnose(data):
name)
if 'lxml' in basic_parsers:
- basic_parsers.append(["lxml", "xml"])
+ basic_parsers.append("lxml-xml")
try:
from lxml import etree
print "Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION))
@@ -56,21 +56,27 @@ def diagnose(data):
if hasattr(data, 'read'):
data = data.read()
- elif os.path.exists(data):
- print '"%s" looks like a filename. Reading data from the file.' % data
- with open(data) as fp:
- data = fp.read()
elif data.startswith("http:") or data.startswith("https:"):
print '"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data
print "You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup."
return
- print
+ else:
+ try:
+ if os.path.exists(data):
+ print '"%s" looks like a filename. Reading data from the file.' % data
+ with open(data) as fp:
+ data = fp.read()
+ except ValueError:
+ # This can happen on some platforms when the 'filename' is
+ # too long. Assume it's data and not a filename.
+ pass
+ print
for parser in basic_parsers:
print "Trying to parse your markup with %s" % parser
success = False
try:
- soup = BeautifulSoup(data, parser)
+ soup = BeautifulSoup(data, features=parser)
success = True
except Exception, e:
print "%s could not parse the markup." % parser
diff --git a/included_dependencies/bs4/element.py b/included_dependencies/bs4/element.py
index b100d18b..8383c3fc 100644
--- a/included_dependencies/bs4/element.py
+++ b/included_dependencies/bs4/element.py
@@ -2,7 +2,10 @@
# found in the LICENSE file.
__license__ = "MIT"
-import collections
+try:
+ from collections.abc import Callable # Python 3.6
+except ImportError , e:
+ from collections import Callable
import re
import shlex
import sys
@@ -12,7 +15,7 @@ from bs4.dammit import EntitySubstitution
DEFAULT_OUTPUT_ENCODING = "utf-8"
PY3K = (sys.version_info[0] > 2)
-whitespace_re = re.compile("\s+")
+whitespace_re = re.compile(r"\s+")
def _alias(attr):
"""Alias one attribute name to another for backward compatibility"""
@@ -69,7 +72,7 @@ class ContentMetaAttributeValue(AttributeValueWithCharsetSubstitution):
The value of the 'content' attribute will be one of these objects.
"""
- CHARSET_RE = re.compile("((^|;)\s*charset=)([^;]*)", re.M)
+ CHARSET_RE = re.compile(r"((^|;)\s*charset=)([^;]*)", re.M)
def __new__(cls, original_value):
match = cls.CHARSET_RE.search(original_value)
@@ -123,6 +126,41 @@ class HTMLAwareEntitySubstitution(EntitySubstitution):
return cls._substitute_if_appropriate(
ns, EntitySubstitution.substitute_xml)
+class Formatter(object):
+ """Contains information about how to format a parse tree."""
+
+ # By default, represent void elements as rather than
+ void_element_close_prefix = '/'
+
+ def substitute_entities(self, *args, **kwargs):
+ """Transform certain characters into named entities."""
+ raise NotImplementedError()
+
+class HTMLFormatter(Formatter):
+ """The default HTML formatter."""
+ def substitute(self, *args, **kwargs):
+ return HTMLAwareEntitySubstitution.substitute_html(*args, **kwargs)
+
+class MinimalHTMLFormatter(Formatter):
+ """A minimal HTML formatter."""
+ def substitute(self, *args, **kwargs):
+ return HTMLAwareEntitySubstitution.substitute_xml(*args, **kwargs)
+
+class HTML5Formatter(HTMLFormatter):
+ """An HTML formatter that omits the slash in a void tag."""
+ void_element_close_prefix = None
+
+class XMLFormatter(Formatter):
+ """Substitute only the essential XML entities."""
+ def substitute(self, *args, **kwargs):
+ return EntitySubstitution.substitute_xml(*args, **kwargs)
+
+class HTMLXMLFormatter(Formatter):
+ """Format XML using HTML rules."""
+ def substitute(self, *args, **kwargs):
+ return HTMLAwareEntitySubstitution.substitute_html(*args, **kwargs)
+
+
class PageElement(object):
"""Contains the navigational information for some part of the page
(either a tag or a piece of text)"""
@@ -132,39 +170,48 @@ class PageElement(object):
#
# "html" - All Unicode characters with corresponding HTML entities
# are converted to those entities on output.
+ # "html5" - The same as "html", but empty void tags are represented as
+ # rather than
# "minimal" - Bare ampersands and angle brackets are converted to
# XML entities: & < >
# None - The null formatter. Unicode characters are never
# converted to entities. This is not recommended, but it's
# faster than "minimal".
- # A function - This function will be called on every string that
+ # A callable function - it will be called on every string that needs to undergo entity substitution.
+ # A Formatter instance - Formatter.substitute(string) will be called on every string that
# needs to undergo entity substitution.
#
- # In an HTML document, the default "html" and "minimal" functions
- # will leave the contents of ')
# => <script> do_nasty_stuff() </script>
@@ -782,7 +813,7 @@ class Filter(base.Filter):
# characters, nor why we call unescape. I just know it's always been here.
# Should you be worried by this comment in a sanitizer? Yes. On the other hand, all
# this will do is remove *more* than it otherwise would.
- val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\s]+", '',
+ val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\\s]+", '',
unescape(attrs[attr])).lower()
# remove replacement characters from unescaped characters
val_unescaped = val_unescaped.replace("\ufffd", "")
@@ -807,7 +838,7 @@ class Filter(base.Filter):
' ',
unescape(attrs[attr]))
if (token["name"] in self.svg_allow_local_href and
- (namespaces['xlink'], 'href') in attrs and re.search('^\s*[^#\s].*',
+ (namespaces['xlink'], 'href') in attrs and re.search(r'^\s*[^#\s].*',
attrs[(namespaces['xlink'], 'href')])):
del attrs[(namespaces['xlink'], 'href')]
if (None, 'style') in attrs:
@@ -837,16 +868,16 @@ class Filter(base.Filter):
def sanitize_css(self, style):
# disallow urls
- style = re.compile('url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)
+ style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)
# gauntlet
- if not re.match("""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style):
+ if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style):
return ''
- if not re.match("^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style):
+ if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style):
return ''
clean = []
- for prop, value in re.findall("([-\w]+)\s*:\s*([^:;]*)", style):
+ for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style):
if not value:
continue
if prop.lower() in self.allowed_css_properties:
@@ -855,7 +886,7 @@ class Filter(base.Filter):
'padding']:
for keyword in value.split():
if keyword not in self.allowed_css_keywords and \
- not re.match("^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqa
+ not re.match(r"^(#[0-9a-fA-F]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqa
break
else:
clean.append(prop + ': ' + value + ';')
diff --git a/included_dependencies/html5lib/filters/whitespace.py b/included_dependencies/html5lib/filters/whitespace.py
index 89210528..0d12584b 100644
--- a/included_dependencies/html5lib/filters/whitespace.py
+++ b/included_dependencies/html5lib/filters/whitespace.py
@@ -10,7 +10,7 @@ SPACES_REGEX = re.compile("[%s]+" % spaceCharacters)
class Filter(base.Filter):
-
+ """Collapses whitespace except in pre, textarea, and script elements"""
spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements))
def __iter__(self):
diff --git a/included_dependencies/html5lib/html5parser.py b/included_dependencies/html5lib/html5parser.py
index 2abd63e4..9d39b9d4 100644
--- a/included_dependencies/html5lib/html5parser.py
+++ b/included_dependencies/html5lib/html5parser.py
@@ -1,12 +1,8 @@
from __future__ import absolute_import, division, unicode_literals
-from six import with_metaclass, viewkeys, PY3
+from six import with_metaclass, viewkeys
import types
-
-try:
- from collections import OrderedDict
-except ImportError:
- from ordereddict import OrderedDict
+from collections import OrderedDict
from . import _inputstream
from . import _tokenizer
@@ -24,18 +20,53 @@ from .constants import (
adjustForeignAttributes as adjustForeignAttributesMap,
adjustMathMLAttributes, adjustSVGAttributes,
E,
- ReparseException
+ _ReparseException
)
def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs):
- """Parse a string or file-like object into a tree"""
+ """Parse an HTML document as a string or file-like object into a tree
+
+ :arg doc: the document to parse as a string or file-like object
+
+ :arg treebuilder: the treebuilder to use when parsing
+
+ :arg namespaceHTMLElements: whether or not to namespace HTML elements
+
+ :returns: parsed tree
+
+ Example:
+
+ >>> from html5lib.html5parser import parse
+ >>> parse('
This is a doc
')
+
+
+ """
tb = treebuilders.getTreeBuilder(treebuilder)
p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
return p.parse(doc, **kwargs)
def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs):
+ """Parse an HTML fragment as a string or file-like object into a tree
+
+ :arg doc: the fragment to parse as a string or file-like object
+
+ :arg container: the container context to parse the fragment in
+
+ :arg treebuilder: the treebuilder to use when parsing
+
+ :arg namespaceHTMLElements: whether or not to namespace HTML elements
+
+ :returns: parsed tree
+
+ Example:
+
+ >>> from html5lib.html5libparser import parseFragment
+ >>> parseFragment('this is a fragment')
+
+
+ """
tb = treebuilders.getTreeBuilder(treebuilder)
p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
return p.parseFragment(doc, container=container, **kwargs)
@@ -54,16 +85,30 @@ def method_decorator_metaclass(function):
class HTMLParser(object):
- """HTML parser. Generates a tree structure from a stream of (possibly
- malformed) HTML"""
+ """HTML parser
+
+ Generates a tree structure from a stream of (possibly malformed) HTML.
+
+ """
def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False):
"""
- strict - raise an exception when a parse error is encountered
+ :arg tree: a treebuilder class controlling the type of tree that will be
+ returned. Built in treebuilders can be accessed through
+ html5lib.treebuilders.getTreeBuilder(treeType)
+
+ :arg strict: raise an exception when a parse error is encountered
+
+ :arg namespaceHTMLElements: whether or not to namespace HTML elements
+
+ :arg debug: whether or not to enable debug mode which logs things
+
+ Example:
+
+ >>> from html5lib.html5parser import HTMLParser
+ >>> parser = HTMLParser() # generates parser with etree builder
+ >>> parser = HTMLParser('lxml', strict=True) # generates parser with lxml builder which is strict
- tree - a treebuilder class controlling the type of tree that will be
- returned. Built in treebuilders can be accessed through
- html5lib.treebuilders.getTreeBuilder(treeType)
"""
# Raise an exception on the first error encountered
@@ -87,7 +132,7 @@ class HTMLParser(object):
try:
self.mainLoop()
- except ReparseException:
+ except _ReparseException:
self.reset()
self.mainLoop()
@@ -127,9 +172,8 @@ class HTMLParser(object):
@property
def documentEncoding(self):
- """The name of the character encoding
- that was used to decode the input stream,
- or :obj:`None` if that is not determined yet.
+ """Name of the character encoding that was used to decode the input stream, or
+ :obj:`None` if that is not determined yet
"""
if not hasattr(self, 'tokenizer'):
@@ -223,14 +267,24 @@ class HTMLParser(object):
def parse(self, stream, *args, **kwargs):
"""Parse a HTML document into a well-formed tree
- stream - a filelike object or string containing the HTML to be parsed
+ :arg stream: a file-like object or string containing the HTML to be parsed
- The optional encoding parameter must be a string that indicates
- the encoding. If specified, that encoding will be used,
- regardless of any BOM or later declaration (such as in a meta
- element)
+ The optional encoding parameter must be a string that indicates
+ the encoding. If specified, that encoding will be used,
+ regardless of any BOM or later declaration (such as in a meta
+ element).
+
+ :arg scripting: treat noscript elements as if JavaScript was turned on
+
+ :returns: parsed tree
+
+ Example:
+
+ >>> from html5lib.html5parser import HTMLParser
+ >>> parser = HTMLParser()
+ >>> parser.parse('
This is a doc
')
+
- scripting - treat noscript elements as if javascript was turned on
"""
self._parse(stream, False, None, *args, **kwargs)
return self.tree.getDocument()
@@ -238,17 +292,27 @@ class HTMLParser(object):
def parseFragment(self, stream, *args, **kwargs):
"""Parse a HTML fragment into a well-formed tree fragment
- container - name of the element we're setting the innerHTML property
- if set to None, default to 'div'
+ :arg container: name of the element we're setting the innerHTML
+ property if set to None, default to 'div'
- stream - a filelike object or string containing the HTML to be parsed
+ :arg stream: a file-like object or string containing the HTML to be parsed
- The optional encoding parameter must be a string that indicates
- the encoding. If specified, that encoding will be used,
- regardless of any BOM or later declaration (such as in a meta
- element)
+ The optional encoding parameter must be a string that indicates
+ the encoding. If specified, that encoding will be used,
+ regardless of any BOM or later declaration (such as in a meta
+ element)
+
+ :arg scripting: treat noscript elements as if JavaScript was turned on
+
+ :returns: parsed tree
+
+ Example:
+
+ >>> from html5lib.html5libparser import HTMLParser
+ >>> parser = HTMLParser()
+ >>> parser.parseFragment('this is a fragment')
+
- scripting - treat noscript elements as if javascript was turned on
"""
self._parse(stream, True, *args, **kwargs)
return self.tree.getFragment()
@@ -262,8 +326,7 @@ class HTMLParser(object):
raise ParseError(E[errorcode] % datavars)
def normalizeToken(self, token):
- """ HTML5 specific normalizations to the token stream """
-
+ # HTML5 specific normalizations to the token stream
if token["type"] == tokenTypes["StartTag"]:
raw = token["data"]
token["data"] = OrderedDict(raw)
@@ -331,9 +394,7 @@ class HTMLParser(object):
self.phase = new_phase
def parseRCDataRawtext(self, token, contentType):
- """Generic RCDATA/RAWTEXT Parsing algorithm
- contentType - RCDATA or RAWTEXT
- """
+ # Generic RCDATA/RAWTEXT Parsing algorithm
assert contentType in ("RAWTEXT", "RCDATA")
self.tree.insertElement(token)
@@ -2711,10 +2772,7 @@ def getPhases(debug):
def adjust_attributes(token, replacements):
- if PY3 or _utils.PY27:
- needs_adjustment = viewkeys(token['data']) & viewkeys(replacements)
- else:
- needs_adjustment = frozenset(token['data']) & frozenset(replacements)
+ needs_adjustment = viewkeys(token['data']) & viewkeys(replacements)
if needs_adjustment:
token['data'] = OrderedDict((replacements.get(k, k), v)
for k, v in token['data'].items())
diff --git a/included_dependencies/html5lib/serializer.py b/included_dependencies/html5lib/serializer.py
index 103dd206..d6b7105d 100644
--- a/included_dependencies/html5lib/serializer.py
+++ b/included_dependencies/html5lib/serializer.py
@@ -68,10 +68,33 @@ def htmlentityreplace_errors(exc):
else:
return xmlcharrefreplace_errors(exc)
+
register_error("htmlentityreplace", htmlentityreplace_errors)
def serialize(input, tree="etree", encoding=None, **serializer_opts):
+ """Serializes the input token stream using the specified treewalker
+
+ :arg input: the token stream to serialize
+
+ :arg tree: the treewalker to use
+
+ :arg encoding: the encoding to use
+
+ :arg serializer_opts: any options to pass to the
+ :py:class:`html5lib.serializer.HTMLSerializer` that gets created
+
+ :returns: the tree serialized as a string
+
+ Example:
+
+ >>> from html5lib.html5parser import parse
+ >>> from html5lib.serializer import serialize
+ >>> token_stream = parse('
'
+
+ """
# XXX: Should we cache this?
walker = treewalkers.getTreeWalker(tree)
s = HTMLSerializer(**serializer_opts)
@@ -110,50 +133,83 @@ class HTMLSerializer(object):
"strip_whitespace", "sanitize")
def __init__(self, **kwargs):
- """Initialize HTMLSerializer.
+ """Initialize HTMLSerializer
- Keyword options (default given first unless specified) include:
+ :arg inject_meta_charset: Whether or not to inject the meta charset.
- inject_meta_charset=True|False
- Whether it insert a meta element to define the character set of the
- document.
- quote_attr_values="legacy"|"spec"|"always"
- Whether to quote attribute values that don't require quoting
- per legacy browser behaviour, when required by the standard, or always.
- quote_char=u'"'|u"'"
- Use given quote character for attribute quoting. Default is to
- use double quote unless attribute value contains a double quote,
- in which case single quotes are used instead.
- escape_lt_in_attrs=False|True
- Whether to escape < in attribute values.
- escape_rcdata=False|True
- Whether to escape characters that need to be escaped within normal
- elements within rcdata elements such as style.
- resolve_entities=True|False
- Whether to resolve named character entities that appear in the
- source tree. The XML predefined entities < > & " '
- are unaffected by this setting.
- strip_whitespace=False|True
- Whether to remove semantically meaningless whitespace. (This
- compresses all whitespace to a single space except within pre.)
- minimize_boolean_attributes=True|False
- Shortens boolean attributes to give just the attribute value,
- for example <input disabled="disabled"> becomes <input disabled>.
- use_trailing_solidus=False|True
- Includes a close-tag slash at the end of the start tag of void
- elements (empty elements whose end tag is forbidden). E.g. <hr/>.
- space_before_trailing_solidus=True|False
- Places a space immediately before the closing slash in a tag
- using a trailing solidus. E.g. <hr />. Requires use_trailing_solidus.
- sanitize=False|True
- Strip all unsafe or unknown constructs from output.
- See `html5lib user documentation`_
- omit_optional_tags=True|False
- Omit start/end tags that are optional.
- alphabetical_attributes=False|True
- Reorder attributes to be in alphabetical order.
+ Defaults to ``True``.
+
+ :arg quote_attr_values: Whether to quote attribute values that don't
+ require quoting per legacy browser behavior (``"legacy"``), when
+ required by the standard (``"spec"``), or always (``"always"``).
+
+ Defaults to ``"legacy"``.
+
+ :arg quote_char: Use given quote character for attribute quoting.
+
+ Defaults to ``"`` which will use double quotes unless attribute
+ value contains a double quote, in which case single quotes are
+ used.
+
+ :arg escape_lt_in_attrs: Whether or not to escape ``<`` in attribute
+ values.
+
+ Defaults to ``False``.
+
+ :arg escape_rcdata: Whether to escape characters that need to be
+ escaped within normal elements within rcdata elements such as
+ style.
+
+ Defaults to ``False``.
+
+ :arg resolve_entities: Whether to resolve named character entities that
+ appear in the source tree. The XML predefined entities < >
+ & " ' are unaffected by this setting.
+
+ Defaults to ``True``.
+
+ :arg strip_whitespace: Whether to remove semantically meaningless
+ whitespace. (This compresses all whitespace to a single space
+ except within ``pre``.)
+
+ Defaults to ``False``.
+
+ :arg minimize_boolean_attributes: Shortens boolean attributes to give
+ just the attribute value, for example::
+
+ <input disabled="disabled">
+
+ becomes::
+
+ <input disabled>
+
+ Defaults to ``True``.
+
+ :arg use_trailing_solidus: Includes a close-tag slash at the end of the
+ start tag of void elements (empty elements whose end tag is
+ forbidden). E.g. ``<hr/>``.
+
+ Defaults to ``False``.
+
+ :arg space_before_trailing_solidus: Places a space immediately before
+ the closing slash in a tag using a trailing solidus. E.g.
+ ``<hr />``. Requires ``use_trailing_solidus=True``.
+
+ Defaults to ``True``.
+
+ :arg sanitize: Strip all unsafe or unknown constructs from output.
+ See :py:class:`html5lib.filters.sanitizer.Filter`.
+
+ Defaults to ``False``.
+
+ :arg omit_optional_tags: Omit start/end tags that are optional.
+
+ Defaults to ``True``.
+
+ :arg alphabetical_attributes: Reorder attributes to be in alphabetical order.
+
+ Defaults to ``False``.
- .. _html5lib user documentation: http://code.google.com/p/html5lib/wiki/UserDocumentation
"""
unexpected_args = frozenset(kwargs) - frozenset(self.options)
if len(unexpected_args) > 0:
@@ -317,6 +373,25 @@ class HTMLSerializer(object):
self.serializeError(token["data"])
def render(self, treewalker, encoding=None):
+ """Serializes the stream from the treewalker into a string
+
+ :arg treewalker: the treewalker to serialize
+
+ :arg encoding: the string encoding to use
+
+ :returns: the serialized tree
+
+ Example:
+
+ >>> from html5lib import parse, getTreeWalker
+ >>> from html5lib.serializer import HTMLSerializer
+ >>> token_stream = parse('<html><body>Hi!</body></html>')
+ >>> walker = getTreeWalker('etree')
+ >>> serializer = HTMLSerializer(omit_optional_tags=False)
+ >>> serializer.render(walker(token_stream))
+ '<html><head></head><body>Hi!</body></html>'
+
+ """
if encoding:
return b"".join(list(self.serialize(treewalker, encoding)))
else:
diff --git a/included_dependencies/html5lib/treeadapters/__init__.py b/included_dependencies/html5lib/treeadapters/__init__.py
index 4f978466..dfeb0ba5 100644
--- a/included_dependencies/html5lib/treeadapters/__init__.py
+++ b/included_dependencies/html5lib/treeadapters/__init__.py
@@ -1,3 +1,21 @@
+"""Tree adapters let you convert from one tree structure to another
+
+Example:
+
+.. code-block:: python
+
+ import html5lib
+ from html5lib.treeadapters import genshi
+
+ doc = '<html><body>Hi!</body></html>'
+ treebuilder = html5lib.getTreeBuilder('etree')
+ parser = html5lib.HTMLParser(tree=treebuilder)
+ tree = parser.parse(doc)
+ TreeWalker = html5lib.getTreeWalker('etree')
+
+ genshi_tree = genshi.to_genshi(TreeWalker(tree))
+
+"""
from __future__ import absolute_import, division, unicode_literals
from . import sax
diff --git a/included_dependencies/html5lib/treeadapters/genshi.py b/included_dependencies/html5lib/treeadapters/genshi.py
index 04e316df..61d5fb6a 100644
--- a/included_dependencies/html5lib/treeadapters/genshi.py
+++ b/included_dependencies/html5lib/treeadapters/genshi.py
@@ -5,6 +5,13 @@ from genshi.core import START, END, TEXT, COMMENT, DOCTYPE
def to_genshi(walker):
+ """Convert a tree to a genshi tree
+
+ :arg walker: the treewalker to use to walk the tree to convert it
+
+ :returns: generator of genshi nodes
+
+ """
text = []
for token in walker:
type = token["type"]
diff --git a/included_dependencies/html5lib/treeadapters/sax.py b/included_dependencies/html5lib/treeadapters/sax.py
index ad47df95..f4ccea5a 100644
--- a/included_dependencies/html5lib/treeadapters/sax.py
+++ b/included_dependencies/html5lib/treeadapters/sax.py
@@ -11,7 +11,13 @@ for prefix, localName, namespace in adjustForeignAttributes.values():
def to_sax(walker, handler):
- """Call SAX-like content handler based on treewalker walker"""
+ """Call SAX-like content handler based on treewalker walker
+
+ :arg walker: the treewalker to use to walk the tree to convert it
+
+ :arg handler: SAX handler to use
+
+ """
handler.startDocument()
for prefix, namespace in prefix_mapping.items():
handler.startPrefixMapping(prefix, namespace)
diff --git a/included_dependencies/html5lib/treebuilders/__init__.py b/included_dependencies/html5lib/treebuilders/__init__.py
index e2328847..d44447ea 100644
--- a/included_dependencies/html5lib/treebuilders/__init__.py
+++ b/included_dependencies/html5lib/treebuilders/__init__.py
@@ -1,29 +1,32 @@
-"""A collection of modules for building different kinds of tree from
-HTML documents.
+"""A collection of modules for building different kinds of trees from HTML
+documents.
To create a treebuilder for a new type of tree, you need to do
implement several things:
-1) A set of classes for various types of elements: Document, Doctype,
-Comment, Element. These must implement the interface of
-_base.treebuilders.Node (although comment nodes have a different
-signature for their constructor, see treebuilders.etree.Comment)
-Textual content may also be implemented as another node type, or not, as
-your tree implementation requires.
+1. A set of classes for various types of elements: Document, Doctype, Comment,
+ Element. These must implement the interface of ``base.treebuilders.Node``
+ (although comment nodes have a different signature for their constructor,
+ see ``treebuilders.etree.Comment``) Textual content may also be implemented
+ as another node type, or not, as your tree implementation requires.
-2) A treebuilder object (called TreeBuilder by convention) that
-inherits from treebuilders._base.TreeBuilder. This has 4 required attributes:
-documentClass - the class to use for the bottommost node of a document
-elementClass - the class to use for HTML Elements
-commentClass - the class to use for comments
-doctypeClass - the class to use for doctypes
-It also has one required method:
-getDocument - Returns the root node of the complete document tree
+2. A treebuilder object (called ``TreeBuilder`` by convention) that inherits
+ from ``treebuilders.base.TreeBuilder``. This has 4 required attributes:
+
+ * ``documentClass`` - the class to use for the bottommost node of a document
+ * ``elementClass`` - the class to use for HTML Elements
+ * ``commentClass`` - the class to use for comments
+ * ``doctypeClass`` - the class to use for doctypes
+
+ It also has one required method:
+
+ * ``getDocument`` - Returns the root node of the complete document tree
+
+3. If you wish to run the unit tests, you must also create a ``testSerializer``
+ method on your treebuilder which accepts a node and returns a string
+ containing Node and its children serialized according to the format used in
+ the unittests
-3) If you wish to run the unit tests, you must also create a
-testSerializer method on your treebuilder which accepts a node and
-returns a string containing Node and its children serialized according
-to the format used in the unittests
"""
from __future__ import absolute_import, division, unicode_literals
@@ -34,23 +37,32 @@ treeBuilderCache = {}
def getTreeBuilder(treeType, implementation=None, **kwargs):
- """Get a TreeBuilder class for various types of tree with built-in support
+ """Get a TreeBuilder class for various types of trees with built-in support
- treeType - the name of the tree type required (case-insensitive). Supported
- values are:
+ :arg treeType: the name of the tree type required (case-insensitive). Supported
+ values are:
- "dom" - A generic builder for DOM implementations, defaulting to
- a xml.dom.minidom based implementation.
- "etree" - A generic builder for tree implementations exposing an
- ElementTree-like interface, defaulting to
- xml.etree.cElementTree if available and
- xml.etree.ElementTree if not.
- "lxml" - A etree-based builder for lxml.etree, handling
- limitations of lxml's implementation.
+ * "dom" - A generic builder for DOM implementations, defaulting to a
+ xml.dom.minidom based implementation.
+ * "etree" - A generic builder for tree implementations exposing an
+ ElementTree-like interface, defaulting to xml.etree.cElementTree if
+ available and xml.etree.ElementTree if not.
+ * "lxml" - A etree-based builder for lxml.etree, handling limitations
+ of lxml's implementation.
- implementation - (Currently applies to the "etree" and "dom" tree types). A
- module implementing the tree type e.g.
- xml.etree.ElementTree or xml.etree.cElementTree."""
+ :arg implementation: (Currently applies to the "etree" and "dom" tree
+ types). A module implementing the tree type e.g. xml.etree.ElementTree
+ or xml.etree.cElementTree.
+
+ :arg kwargs: Any additional options to pass to the TreeBuilder when
+ creating it.
+
+ Example:
+
+ >>> from html5lib.treebuilders import getTreeBuilder
+ >>> builder = getTreeBuilder('etree')
+
+ """
treeType = treeType.lower()
if treeType not in treeBuilderCache:
diff --git a/included_dependencies/html5lib/treebuilders/base.py b/included_dependencies/html5lib/treebuilders/base.py
index a4b2792a..05d97ecc 100644
--- a/included_dependencies/html5lib/treebuilders/base.py
+++ b/included_dependencies/html5lib/treebuilders/base.py
@@ -21,22 +21,25 @@ listElementsMap = {
class Node(object):
+ """Represents an item in the tree"""
def __init__(self, name):
- """Node representing an item in the tree.
- name - The tag name associated with the node
- parent - The parent of the current node (or None for the document node)
- value - The value of the current node (applies to text nodes and
- comments
- attributes - a dict holding name, value pairs for attributes of the node
- childNodes - a list of child nodes of the current node. This must
- include all elements but not necessarily other node types
- _flags - A list of miscellaneous flags that can be set on the node
+ """Creates a Node
+
+ :arg name: The tag name associated with the node
+
"""
+ # The tag name associated with the node
self.name = name
+ # The parent of the current node (or None for the document node)
self.parent = None
+ # The value of the current node (applies to text nodes and comments)
self.value = None
+ # A dict holding name -> value pairs for attributes of the node
self.attributes = {}
+ # A list of child nodes of the current node. This must include all
+ # elements but not necessarily other node types.
self.childNodes = []
+ # A list of miscellaneous flags that can be set on the node.
self._flags = []
def __str__(self):
@@ -53,23 +56,41 @@ class Node(object):
def appendChild(self, node):
"""Insert node as a child of the current node
+
+ :arg node: the node to insert
+
"""
raise NotImplementedError
def insertText(self, data, insertBefore=None):
"""Insert data as text in the current node, positioned before the
start of node insertBefore or to the end of the node's text.
+
+ :arg data: the data to insert
+
+ :arg insertBefore: True if you want to insert the text before the node
+ and False if you want to insert it after the node
+
"""
raise NotImplementedError
def insertBefore(self, node, refNode):
"""Insert node as a child of the current node, before refNode in the
list of child nodes. Raises ValueError if refNode is not a child of
- the current node"""
+ the current node
+
+ :arg node: the node to insert
+
+ :arg refNode: the child node to insert the node before
+
+ """
raise NotImplementedError
def removeChild(self, node):
"""Remove node from the children of the current node
+
+ :arg node: the child node to remove
+
"""
raise NotImplementedError
@@ -77,6 +98,9 @@ class Node(object):
"""Move all the children of the current node to newParent.
This is needed so that trees that don't store text as nodes move the
text in the correct way
+
+ :arg newParent: the node to move all this node's children to
+
"""
# XXX - should this method be made more general?
for child in self.childNodes:
@@ -121,10 +145,12 @@ class ActiveFormattingElements(list):
class TreeBuilder(object):
"""Base treebuilder implementation
- documentClass - the class to use for the bottommost node of a document
- elementClass - the class to use for HTML Elements
- commentClass - the class to use for comments
- doctypeClass - the class to use for doctypes
+
+ * documentClass - the class to use for the bottommost node of a document
+ * elementClass - the class to use for HTML Elements
+ * commentClass - the class to use for comments
+ * doctypeClass - the class to use for doctypes
+
"""
# pylint:disable=not-callable
@@ -144,6 +170,11 @@ class TreeBuilder(object):
fragmentClass = None
def __init__(self, namespaceHTMLElements):
+ """Create a TreeBuilder
+
+ :arg namespaceHTMLElements: whether or not to namespace HTML elements
+
+ """
if namespaceHTMLElements:
self.defaultNamespace = "http://www.w3.org/1999/xhtml"
else:
@@ -367,11 +398,11 @@ class TreeBuilder(object):
self.generateImpliedEndTags(exclude)
def getDocument(self):
- "Return the final tree"
+ """Return the final tree"""
return self.document
def getFragment(self):
- "Return the final fragment"
+ """Return the final fragment"""
# assert self.innerHTML
fragment = self.fragmentClass()
self.openElements[0].reparentChildren(fragment)
@@ -379,5 +410,8 @@ class TreeBuilder(object):
def testSerializer(self, node):
"""Serialize the subtree of node in the format required by unit tests
- node - the node from which to start serializing"""
+
+ :arg node: the node from which to start serializing
+
+ """
raise NotImplementedError
diff --git a/included_dependencies/html5lib/treebuilders/etree_lxml.py b/included_dependencies/html5lib/treebuilders/etree_lxml.py
index 908820c0..ca12a99c 100644
--- a/included_dependencies/html5lib/treebuilders/etree_lxml.py
+++ b/included_dependencies/html5lib/treebuilders/etree_lxml.py
@@ -309,7 +309,6 @@ class TreeBuilder(base.TreeBuilder):
super(TreeBuilder, self).insertComment(data, parent)
def insertRoot(self, token):
- """Create the document root"""
# Because of the way libxml2 works, it doesn't seem to be possible to
# alter information like the doctype after the tree has been parsed.
# Therefore we need to use the built-in parser to create our initial
diff --git a/included_dependencies/html5lib/treewalkers/__init__.py b/included_dependencies/html5lib/treewalkers/__init__.py
index 9e19a559..9bec2076 100644
--- a/included_dependencies/html5lib/treewalkers/__init__.py
+++ b/included_dependencies/html5lib/treewalkers/__init__.py
@@ -13,7 +13,7 @@ from __future__ import absolute_import, division, unicode_literals
from .. import constants
from .._utils import default_etree
-__all__ = ["getTreeWalker", "pprint", "dom", "etree", "genshi", "etree_lxml"]
+__all__ = ["getTreeWalker", "pprint"]
treeWalkerCache = {}
@@ -21,20 +21,25 @@ treeWalkerCache = {}
def getTreeWalker(treeType, implementation=None, **kwargs):
"""Get a TreeWalker class for various types of tree with built-in support
- Args:
- treeType (str): the name of the tree type required (case-insensitive).
- Supported values are:
+ :arg str treeType: the name of the tree type required (case-insensitive).
+ Supported values are:
- - "dom": The xml.dom.minidom DOM implementation
- - "etree": A generic walker for tree implementations exposing an
- elementtree-like interface (known to work with
- ElementTree, cElementTree and lxml.etree).
- - "lxml": Optimized walker for lxml.etree
- - "genshi": a Genshi stream
+ * "dom": The xml.dom.minidom DOM implementation
+ * "etree": A generic walker for tree implementations exposing an
+ elementtree-like interface (known to work with ElementTree,
+ cElementTree and lxml.etree).
+ * "lxml": Optimized walker for lxml.etree
+ * "genshi": a Genshi stream
+
+ :arg implementation: A module implementing the tree type e.g.
+ xml.etree.ElementTree or cElementTree (Currently applies to the "etree"
+ tree type only).
+
+ :arg kwargs: keyword arguments passed to the etree walker--for other
+ walkers, this has no effect
+
+ :returns: a TreeWalker class
- Implementation: A module implementing the tree type e.g.
- xml.etree.ElementTree or cElementTree (Currently applies to the
- "etree" tree type only).
"""
treeType = treeType.lower()
@@ -73,7 +78,13 @@ def concatenateCharacterTokens(tokens):
def pprint(walker):
- """Pretty printer for tree walkers"""
+ """Pretty printer for tree walkers
+
+ Takes a TreeWalker instance and pretty prints the output of walking the tree.
+
+ :arg walker: a TreeWalker instance
+
+ """
output = []
indent = 0
for token in concatenateCharacterTokens(walker):
diff --git a/included_dependencies/html5lib/treewalkers/base.py b/included_dependencies/html5lib/treewalkers/base.py
index 36e1ba24..80c474c4 100644
--- a/included_dependencies/html5lib/treewalkers/base.py
+++ b/included_dependencies/html5lib/treewalkers/base.py
@@ -18,16 +18,48 @@ spaceCharacters = "".join(spaceCharacters)
class TreeWalker(object):
+ """Walks a tree yielding tokens
+
+ Tokens are dicts that all have a ``type`` field specifying the type of the
+ token.
+
+ """
def __init__(self, tree):
+ """Creates a TreeWalker
+
+ :arg tree: the tree to walk
+
+ """
self.tree = tree
def __iter__(self):
raise NotImplementedError
def error(self, msg):
+ """Generates an error token with the given message
+
+ :arg msg: the error message
+
+ :returns: SerializeError token
+
+ """
return {"type": "SerializeError", "data": msg}
def emptyTag(self, namespace, name, attrs, hasChildren=False):
+ """Generates an EmptyTag token
+
+ :arg namespace: the namespace of the token--can be ``None``
+
+ :arg name: the name of the element
+
+ :arg attrs: the attributes of the element as a dict
+
+ :arg hasChildren: whether or not to yield a SerializationError because
+ this tag shouldn't have children
+
+ :returns: EmptyTag token
+
+ """
yield {"type": "EmptyTag", "name": name,
"namespace": namespace,
"data": attrs}
@@ -35,17 +67,61 @@ class TreeWalker(object):
yield self.error("Void element has children")
def startTag(self, namespace, name, attrs):
+ """Generates a StartTag token
+
+ :arg namespace: the namespace of the token--can be ``None``
+
+ :arg name: the name of the element
+
+ :arg attrs: the attributes of the element as a dict
+
+ :returns: StartTag token
+
+ """
return {"type": "StartTag",
"name": name,
"namespace": namespace,
"data": attrs}
def endTag(self, namespace, name):
+ """Generates an EndTag token
+
+ :arg namespace: the namespace of the token--can be ``None``
+
+ :arg name: the name of the element
+
+ :returns: EndTag token
+
+ """
return {"type": "EndTag",
"name": name,
"namespace": namespace}
def text(self, data):
+ """Generates SpaceCharacters and Characters tokens
+
+ Depending on what's in the data, this generates one or more
+ ``SpaceCharacters`` and ``Characters`` tokens.
+
+ For example:
+
+ >>> from html5lib.treewalkers.base import TreeWalker
+ >>> # Give it an empty tree just so it instantiates
+ >>> walker = TreeWalker([])
+ >>> list(walker.text(''))
+ []
+ >>> list(walker.text(' '))
+ [{u'data': ' ', u'type': u'SpaceCharacters'}]
+ >>> list(walker.text(' abc ')) # doctest: +NORMALIZE_WHITESPACE
+ [{u'data': ' ', u'type': u'SpaceCharacters'},
+ {u'data': u'abc', u'type': u'Characters'},
+ {u'data': u' ', u'type': u'SpaceCharacters'}]
+
+ :arg data: the text data
+
+ :returns: one or more ``SpaceCharacters`` and ``Characters`` tokens
+
+ """
data = data
middle = data.lstrip(spaceCharacters)
left = data[:len(data) - len(middle)]
@@ -60,18 +136,44 @@ class TreeWalker(object):
yield {"type": "SpaceCharacters", "data": right}
def comment(self, data):
+ """Generates a Comment token
+
+ :arg data: the comment
+
+ :returns: Comment token
+
+ """
return {"type": "Comment", "data": data}
def doctype(self, name, publicId=None, systemId=None):
+ """Generates a Doctype token
+
+ :arg name:
+
+ :arg publicId:
+
+ :arg systemId:
+
+ :returns: the Doctype token
+
+ """
return {"type": "Doctype",
"name": name,
"publicId": publicId,
"systemId": systemId}
def entity(self, name):
+ """Generates an Entity token
+
+ :arg name: the entity name
+
+ :returns: an Entity token
+
+ """
return {"type": "Entity", "name": name}
def unknown(self, nodeType):
+ """Handles unknown node types"""
return self.error("Unknown node type: " + nodeType)
diff --git a/included_dependencies/html5lib/treewalkers/etree.py b/included_dependencies/html5lib/treewalkers/etree.py
index 8f30f078..d15a7eeb 100644
--- a/included_dependencies/html5lib/treewalkers/etree.py
+++ b/included_dependencies/html5lib/treewalkers/etree.py
@@ -1,13 +1,6 @@
from __future__ import absolute_import, division, unicode_literals
-try:
- from collections import OrderedDict
-except ImportError:
- try:
- from ordereddict import OrderedDict
- except ImportError:
- OrderedDict = dict
-
+from collections import OrderedDict
import re
from six import string_types
From b5fa47838e2323c5152d6b561fc5594ecbb98233 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 8 Aug 2018 13:43:41 -0500
Subject: [PATCH 107/120] Update code for Calibre Plugin create for py3.
---
makeplugin.py | 2 +-
makezip.py | 14 ++++++++------
2 files changed, 9 insertions(+), 7 deletions(-)
diff --git a/makeplugin.py b/makeplugin.py
index 62b61da7..72b69cbc 100644
--- a/makeplugin.py
+++ b/makeplugin.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright 2015, Jim Miller
+# Copyright 2018, Jim Miller
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/makezip.py b/makezip.py
index 0028a029..24e95d16 100644
--- a/makezip.py
+++ b/makezip.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# Copyright 2015, Jim Miller
+# Copyright 2018, Jim Miller
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,11 +15,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from __future__ import absolute_import
import os, zipfile, sys
from glob import glob
+from six import text_type as unicode
+
def addFolderToZip(myZipFile,folder,exclude=[]):
- folder = folder.encode('ascii') #convert path to ascii for ZipFile Method
+ # print("folder:"+folder)
excludelist=[]
for ex in exclude:
excludelist.extend(glob(folder+"/"+ex))
@@ -27,7 +30,7 @@ def addFolderToZip(myZipFile,folder,exclude=[]):
if file in excludelist:
continue
if os.path.isfile(file):
- #print file
+ # print("folder file:"+file)
myZipFile.write(file, file, zipfile.ZIP_DEFLATED)
elif os.path.isdir(file):
addFolderToZip(myZipFile,file,exclude=exclude)
@@ -40,11 +43,10 @@ def createZipFile(filename,mode,files,exclude=[]):
for file in files:
if file in excludelist:
continue
- file = file.encode('ascii') #convert path to ascii for ZipFile Method
+ # print("file:"+file)
if os.path.isfile(file):
(filepath, filename) = os.path.split(file)
- #print file
- myZipFile.write( file, filename, zipfile.ZIP_DEFLATED )
+ myZipFile.write( file, unicode(filename), zipfile.ZIP_DEFLATED )
if os.path.isdir(file):
addFolderToZip(myZipFile,file,exclude=exclude)
myZipFile.close()
From 32857a9dad90ccf910cf17d91bfdc1601fb65539 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 8 Aug 2018 13:48:14 -0500
Subject: [PATCH 108/120] Update version update code for py3.
---
version_update.py | 23 +++++++++++++++++++----
1 file changed, 19 insertions(+), 4 deletions(-)
diff --git a/version_update.py b/version_update.py
index 928052eb..ba870ed9 100644
--- a/version_update.py
+++ b/version_update.py
@@ -1,11 +1,26 @@
+#!/usr/bin/python
# -*- coding: utf-8 -*-
+# Copyright 2018, Jim Miller
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
import codecs, sys, re
from tempfile import mkstemp
from os import rename, close, unlink
-#print sys.argv[1:]
+#print(sys.argv[1:])
## Files that contain version numbers that will need to be updated.
version_files = [
@@ -56,7 +71,7 @@ version="2.3.6"
def do_loop(files, pattern, substring):
global saved_version
for source_file_path in files:
- print "src:"+source_file_path
+ print("src:"+source_file_path)
fh, target_file_path = mkstemp()
with codecs.open(target_file_path, 'w', 'utf-8') as target_file:
with codecs.open(source_file_path, 'r', 'utf-8') as source_file:
@@ -78,7 +93,7 @@ if __name__ == '__main__':
raise Exception()
[int(x) for x in args]
except:
- print "Requires exactly 3 numeric args: major minor micro"
+ print("Requires exactly 3 numeric args: major minor micro")
exit()
main(args)
-# print saved_version
+# print(saved_version)
From 389eb8969c7d8da26a5edca6ed4379ff7a5cfb73 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 8 Aug 2018 14:16:41 -0500
Subject: [PATCH 109/120] Make INI order tool py2/py3.
---
ini-order.py | 27 +++++++++++++++++++++------
1 file changed, 21 insertions(+), 6 deletions(-)
diff --git a/ini-order.py b/ini-order.py
index be65928b..7c2cde75 100644
--- a/ini-order.py
+++ b/ini-order.py
@@ -1,16 +1,31 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright 2018, Jim Miller
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
import re
import sys
+from io import open # so py2.7 has open with encoding param.
argv = sys.argv[1:]
-# infile = argv[0]
-# outfile = argv[1]
-
sections = {}
cursectname = ""
cursectlines = []
-with open(argv[0],"r") as infile:
+with open(argv[0],"r", encoding="utf8") as infile:
for line in infile:
if re.match(r"^\[([^\]]+)\]$",line):
sections[cursectname] = cursectlines
@@ -40,8 +55,8 @@ leadsects = [
followsects = [
]
-with open(argv[1],"w") as outfile:
- kl = sections.keys()
+with open(argv[1],"w", encoding="utf8") as outfile:
+ kl = list(sections.keys())
kl.sort()
for k in leadsects:
outfile.write("".join(sections[k]))
From 39580268acfab5f4feec01bdd7cc5b4170fdda6e Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 8 Aug 2018 14:22:57 -0500
Subject: [PATCH 110/120] Change [royalroadl.com] to [www.royalroad.com]
---
calibre-plugin/plugin-defaults.ini | 50 +++++++++++++++---------------
fanficfare/defaults.ini | 50 +++++++++++++++---------------
2 files changed, 50 insertions(+), 50 deletions(-)
diff --git a/calibre-plugin/plugin-defaults.ini b/calibre-plugin/plugin-defaults.ini
index c4e9180d..d3b4343a 100644
--- a/calibre-plugin/plugin-defaults.ini
+++ b/calibre-plugin/plugin-defaults.ini
@@ -1916,31 +1916,6 @@ comments_label:Comments
include_in_category:category,searchtags
-[royalroadl.com]
-extra_valid_entries:stars
-
-#add_to_extra_titlepage_entries:,stars
-
-## some sites include images that we don't ever want becoming the
-## cover image. This lets you exclude them.
-cover_exclusion_regexp:(imgur.com/dzOACJf.png|/forum/images/smilies/)
-
-## Clear FanFiction from defaults, site has fanfictions and original fiction.
-extratags:
-
-## royalroadl.com stories sometimes have 'spoiler' blocks in
-## posts. When viewed in a browser, the block is hidden until a button
-## is clicked. eBook viewers can't handle that and the javascript is
-## disabled. The remove_spoilers option, if uncommented, will remove
-## spoiler blocks entirely.
-#remove_spoilers:true
-
-## This option if uncommented, will put a box around the spoiler
-## blocks with the original spoiler button text as a label using
-## fieldset and legend HTML tags. For a simple box, see the
-## add_to_output_css example for [base_xenforoforum:epub].
-#legend_spoilers:true
-
[samandjack.net]
## Some sites require login (or login for some rated stories) The
## program can prompt you, or you can save it in config. In
@@ -2841,6 +2816,31 @@ extracategories:Queer as Folk
website_encodings:Windows-1252,utf8
+[www.royalroad.com]
+extra_valid_entries:stars
+
+#add_to_extra_titlepage_entries:,stars
+
+## some sites include images that we don't ever want becoming the
+## cover image. This lets you exclude them.
+cover_exclusion_regexp:(imgur.com/dzOACJf.png|/forum/images/smilies/)
+
+## Clear FanFiction from defaults, site has fanfictions and original fiction.
+extratags:
+
+## royalroad.com stories sometimes have 'spoiler' blocks in
+## posts. When viewed in a browser, the block is hidden until a button
+## is clicked. eBook viewers can't handle that and the javascript is
+## disabled. The remove_spoilers option, if uncommented, will remove
+## spoiler blocks entirely.
+#remove_spoilers:true
+
+## This option if uncommented, will put a box around the spoiler
+## blocks with the original spoiler button text as a label using
+## fieldset and legend HTML tags. For a simple box, see the
+## add_to_output_css example for [base_xenforoforum:epub].
+#legend_spoilers:true
+
[www.scarvesandcoffee.net]
## Some sites do not require a login, but do require the user to
## confirm they are adult for adult content. In commandline version,
diff --git a/fanficfare/defaults.ini b/fanficfare/defaults.ini
index 2b079e9d..3fcebf8d 100644
--- a/fanficfare/defaults.ini
+++ b/fanficfare/defaults.ini
@@ -1950,31 +1950,6 @@ comments_label:Comments
include_in_category:category,searchtags
-[royalroadl.com]
-extra_valid_entries:stars
-
-#add_to_extra_titlepage_entries:,stars
-
-## some sites include images that we don't ever want becoming the
-## cover image. This lets you exclude them.
-cover_exclusion_regexp:(imgur.com/dzOACJf.png|/forum/images/smilies/)
-
-## Clear FanFiction from defaults, site has fanfictions and original fiction.
-extratags:
-
-## royalroadl.com stories sometimes have 'spoiler' blocks in
-## posts. When viewed in a browser, the block is hidden until a button
-## is clicked. eBook viewers can't handle that and the javascript is
-## disabled. The remove_spoilers option, if uncommented, will remove
-## spoiler blocks entirely.
-#remove_spoilers:true
-
-## This option if uncommented, will put a box around the spoiler
-## blocks with the original spoiler button text as a label using
-## fieldset and legend HTML tags. For a simple box, see the
-## add_to_output_css example for [base_xenforoforum:epub].
-#legend_spoilers:true
-
[samandjack.net]
## Some sites require login (or login for some rated stories) The
## program can prompt you, or you can save it in config. In
@@ -2869,6 +2844,31 @@ extracategories:Queer as Folk
website_encodings:Windows-1252,utf8
+[www.royalroad.com]
+extra_valid_entries:stars
+
+#add_to_extra_titlepage_entries:,stars
+
+## some sites include images that we don't ever want becoming the
+## cover image. This lets you exclude them.
+cover_exclusion_regexp:(imgur.com/dzOACJf.png|/forum/images/smilies/)
+
+## Clear FanFiction from defaults, site has fanfictions and original fiction.
+extratags:
+
+## royalroad.com stories sometimes have 'spoiler' blocks in
+## posts. When viewed in a browser, the block is hidden until a button
+## is clicked. eBook viewers can't handle that and the javascript is
+## disabled. The remove_spoilers option, if uncommented, will remove
+## spoiler blocks entirely.
+#remove_spoilers:true
+
+## This option if uncommented, will put a box around the spoiler
+## blocks with the original spoiler button text as a label using
+## fieldset and legend HTML tags. For a simple box, see the
+## add_to_output_css example for [base_xenforoforum:epub].
+#legend_spoilers:true
+
[www.scarvesandcoffee.net]
## Some sites do not require a login, but do require the user to
## confirm they are adult for adult content. In commandline version,
From 7b44ef106e5bb0613cfd61ffae315d5c710576e9 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 8 Aug 2018 14:23:46 -0500
Subject: [PATCH 111/120] Accept both [royalroad.com] and pre-existing
[royalroadl.com] sections.
---
fanficfare/adapters/adapter_royalroadl.py | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/fanficfare/adapters/adapter_royalroadl.py b/fanficfare/adapters/adapter_royalroadl.py
index d2451b0f..e5a031d2 100644
--- a/fanficfare/adapters/adapter_royalroadl.py
+++ b/fanficfare/adapters/adapter_royalroadl.py
@@ -100,6 +100,11 @@ class RoyalRoadAdapter(BaseSiteAdapter):
def getAcceptDomains(cls):
return ['royalroad.com','royalroadl.com','www.royalroadl.com']
+ @classmethod
+ def getConfigSections(cls):
+ "Only needs to be overriden if has additional ini sections."
+ return ['royalroadl.com',cls.getSiteDomain()]
+
@classmethod
def getSiteExampleURLs(cls):
return "https://www.royalroad.com/fiction/3056"
From 2cd4be0db0238441236927b3b54eb6725ef829b0 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 8 Aug 2018 14:40:26 -0500
Subject: [PATCH 112/120] Bump Test Version 2.37.2
---
calibre-plugin/__init__.py | 2 +-
fanficfare/cli.py | 2 +-
setup.py | 2 +-
webservice/app.yaml | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/calibre-plugin/__init__.py b/calibre-plugin/__init__.py
index 1d96b215..c4d992ef 100644
--- a/calibre-plugin/__init__.py
+++ b/calibre-plugin/__init__.py
@@ -33,7 +33,7 @@ except NameError:
from calibre.customize import InterfaceActionBase
# pulled out from FanFicFareBase for saving in prefs.py
-__version__ = (2, 37, 1)
+__version__ = (2, 37, 2)
## Apparently the name for this class doesn't matter--it was still
## 'demo' for the first few versions.
diff --git a/fanficfare/cli.py b/fanficfare/cli.py
index f867216a..2405445a 100644
--- a/fanficfare/cli.py
+++ b/fanficfare/cli.py
@@ -39,7 +39,7 @@ else: # > 3.0
def pickle_load(f):
return pickle.load(f,encoding="bytes")
-version="2.37.1"
+version="2.37.2"
os.environ['CURRENT_VERSION_ID']=version
if sys.version_info >= (2, 7):
diff --git a/setup.py b/setup.py
index c53198bf..a0ddea92 100644
--- a/setup.py
+++ b/setup.py
@@ -27,7 +27,7 @@ setup(
name=package_name,
# Versions should comply with PEP440.
- version="2.37.1",
+ version="2.37.2",
description='A tool for downloading fanfiction to eBook formats',
long_description=long_description,
diff --git a/webservice/app.yaml b/webservice/app.yaml
index 8da6b2f0..7019ab90 100644
--- a/webservice/app.yaml
+++ b/webservice/app.yaml
@@ -1,6 +1,6 @@
# ffd-retief-hrd fanficfare
application: fanficfare
-version: 2-37-1
+version: 2-37-2
runtime: python27
api_version: 1
threadsafe: true
From c7cc2a3e0f99670c45cea65e6ca20b6222221ccd Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 8 Aug 2018 22:10:32 -0500
Subject: [PATCH 113/120] Update ini copyrights.
---
calibre-plugin/plugin-defaults.ini | 2 +-
fanficfare/defaults.ini | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/calibre-plugin/plugin-defaults.ini b/calibre-plugin/plugin-defaults.ini
index d3b4343a..cf000204 100644
--- a/calibre-plugin/plugin-defaults.ini
+++ b/calibre-plugin/plugin-defaults.ini
@@ -1,4 +1,4 @@
-# Copyright 2015 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2015 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/fanficfare/defaults.ini b/fanficfare/defaults.ini
index 3fcebf8d..48f8edc6 100644
--- a/fanficfare/defaults.ini
+++ b/fanficfare/defaults.ini
@@ -1,4 +1,4 @@
-# Copyright 2015 Fanficdownloader team, 2017 FanFicFare team
+# Copyright 2015 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
From 9397c5e1f75162c62445953d45564abcf4703738 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Wed, 8 Aug 2018 22:17:34 -0500
Subject: [PATCH 114/120] Fix a stray print to log in mobihtml.py
---
fanficfare/mobihtml.py | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/fanficfare/mobihtml.py b/fanficfare/mobihtml.py
index 92fbfff4..04b5c0b9 100644
--- a/fanficfare/mobihtml.py
+++ b/fanficfare/mobihtml.py
@@ -7,9 +7,10 @@ from __future__ import absolute_import
import re
import sys
-from .six.moves.urllib.parse import unquote
+import logging
# py2 vs py3 transition
+from .six.moves.urllib.parse import unquote
from .six import text_type as unicode
from .six import binary_type as bytes
@@ -17,6 +18,8 @@ from .six import binary_type as bytes
# BeautifulSoup = bs4.BeautifulSoup
from bs4 import BeautifulSoup
+logger = logging.getLogger(__name__)
+
class HtmlProcessor:
WHITESPACE_RE = re.compile(r'\s')
# Look for
@@ -69,7 +72,7 @@ class HtmlProcessor:
# TODO(chatham): Using regexes and looking for name= would be better.
newpos = assembled_text.rfind(ref) # .encode('utf-8')
if newpos == -1:
- print >>sys.stderr, 'Could not find anchor "%s"' % original_ref
+ logger.warn('Could not find anchor "%s"' % original_ref)
continue
newpos += len(ref) + 2 # don't point into the middle of the tag
old_filepos = 'filepos="%.10d"' % anchor_num
From 83d923300d4e8859a57425a18063d5958c9d1ef7 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Thu, 9 Aug 2018 14:36:08 -0500
Subject: [PATCH 115/120] Fix for mobi issue with 0 byte record markers being
misplaced.
---
fanficfare/mobi.py | 2 +-
fanficfare/mobihtml.py | 5 ++++-
2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/fanficfare/mobi.py b/fanficfare/mobi.py
index c61449a2..1a220f2e 100644
--- a/fanficfare/mobi.py
+++ b/fanficfare/mobi.py
@@ -140,7 +140,7 @@ class Converter:
def _ConvertStringToFile(self, html_data, out):
html = HtmlProcessor(html_data)
- data = html.CleanHtml()
+ data = ensure_binary(html.CleanHtml())
# collect offsets of '' tags, use to make index list.
# indexlist = [] # list of (offset,length) tuples.
diff --git a/fanficfare/mobihtml.py b/fanficfare/mobihtml.py
index 04b5c0b9..61d3d6d4 100644
--- a/fanficfare/mobihtml.py
+++ b/fanficfare/mobihtml.py
@@ -1,5 +1,8 @@
#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
# Copyright(c) 2009 Andrew Chatham and Vijay Pandurangan
+# Changes Copyright 2018 FanFicFare team
## This module is used by mobi.py exclusively.
## Renamed Jul 2018 to avoid conflict with other 'html' packages
@@ -43,7 +46,7 @@ class HtmlProcessor:
def _StubInternalAnchors(self):
'''Replace each internal anchor with a fixed-size filepos anchor.
-\
+
Looks for every anchor with and replaces that
with . Stores anchors in self._anchor_references'''
self._anchor_references = []
From 6fbf3bc282a364c7f064f3c287a77020c4547aa7 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Thu, 9 Aug 2018 17:31:31 -0500
Subject: [PATCH 116/120] Fix for mobi broken page breaks at 'file' boundries
and inline 'TOC' links.
---
fanficfare/mobihtml.py | 21 +++++++++++++++------
1 file changed, 15 insertions(+), 6 deletions(-)
diff --git a/fanficfare/mobihtml.py b/fanficfare/mobihtml.py
index 61d3d6d4..639d1c67 100644
--- a/fanficfare/mobihtml.py
+++ b/fanficfare/mobihtml.py
@@ -16,6 +16,7 @@ import logging
from .six.moves.urllib.parse import unquote
from .six import text_type as unicode
from .six import binary_type as bytes
+from .six import ensure_binary
# import bs4
# BeautifulSoup = bs4.BeautifulSoup
@@ -57,8 +58,9 @@ class HtmlProcessor:
anchorlist.extend(self._soup.findAll('reference', href=re.compile('^#')))
for anchor in anchorlist:
self._anchor_references.append((anchor_num, anchor['href']))
- del anchor['href']
anchor['filepos'] = '%.10d' % anchor_num
+ # logger.debug("Add anchor: %s %s"%((anchor_num, anchor)))
+ del anchor['href']
anchor_num += 1
def _ReplaceAnchorStubs(self):
@@ -66,20 +68,27 @@ class HtmlProcessor:
# str() instead of unicode() rather than figure out how to fix
# ancient mobi.py code.
- assembled_text = unicode(self._soup)
+ assembled_text = ensure_binary(unicode(self._soup))
+ # bs4 creating close tags for
+ assembled_text = assembled_text.replace(b'',b'')
+ assembled_text = assembled_text.replace(b'',b'')
del self._soup # shouldn't touch this anymore
for anchor_num, original_ref in self._anchor_references:
ref = unquote(original_ref[1:]) # remove leading '#'
# Find the position of ref in the utf-8 document.
# TODO(chatham): Using regexes and looking for name= would be better.
- newpos = assembled_text.rfind(ref) # .encode('utf-8')
+ newpos = assembled_text.find(b'name="'+ensure_binary(ref)) # .encode('utf-8')
if newpos == -1:
logger.warn('Could not find anchor "%s"' % original_ref)
continue
- newpos += len(ref) + 2 # don't point into the middle of the tag
- old_filepos = 'filepos="%.10d"' % anchor_num
- new_filepos = 'filepos="%.10d"' % newpos
+ # instead of somewhere slightly *after* the tag pointed to,
+ # let's go right in front of it instead by looking for the page
+ # break before it.
+ newpos = assembled_text.rfind(b'<',0,newpos)
+ # logger.debug("Anchor Pos: %s %s '%s|%s'"%((anchor_num, newpos,assembled_text[newpos-15:newpos],assembled_text[newpos:newpos+15])))
+ old_filepos = b'filepos="%.10d"' % anchor_num
+ new_filepos = b'filepos="%.10d"' % newpos
assert assembled_text.find(old_filepos) != -1
assembled_text = assembled_text.replace(old_filepos, new_filepos, 1)
return assembled_text
From a93eeec5ebb68e3bed81e6ac26193f3440c4ccb5 Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Thu, 9 Aug 2018 19:54:01 -0500
Subject: [PATCH 117/120] Fix for mobi output--link to TOC works again--was
broken by html5lib enforcing html5 rules.
---
fanficfare/mobi.py | 4 +++-
fanficfare/mobihtml.py | 10 +++++++---
2 files changed, 10 insertions(+), 4 deletions(-)
diff --git a/fanficfare/mobi.py b/fanficfare/mobi.py
index 1a220f2e..406d6180 100644
--- a/fanficfare/mobi.py
+++ b/fanficfare/mobi.py
@@ -91,7 +91,9 @@ class Converter:
toc_html = []
body_html = []
- PAGE_BREAK = ''
+ ## This gets broken by html5lib/bs4fixed being helpful, but we'll
+ ## fix it inside mobihtml.py
+ PAGE_BREAK = ''
# pull out the title page, assumed first html_strs.
htmltitle = html_strs[0]
diff --git a/fanficfare/mobihtml.py b/fanficfare/mobihtml.py
index 639d1c67..f9125795 100644
--- a/fanficfare/mobihtml.py
+++ b/fanficfare/mobihtml.py
@@ -33,6 +33,12 @@ class HtmlProcessor:
self.unfill = unfill
# html = self._ProcessRawHtml(html)
self._soup = BeautifulSoup(html,'html5lib')
+ ## mobi format wants to find this tag inside .
+ ## html5lib, on the other hand, moved it to . So we'll move
+ ## it back.
+ guide = self._soup.find('guide')
+ if guide:
+ self._soup.head.append(guide)
if self._soup.title.contents:
self.title = self._soup.title.contents[0]
else:
@@ -66,10 +72,8 @@ class HtmlProcessor:
def _ReplaceAnchorStubs(self):
# TODO: Browsers allow extra whitespace in the href names.
- # str() instead of unicode() rather than figure out how to fix
- # ancient mobi.py code.
assembled_text = ensure_binary(unicode(self._soup))
- # bs4 creating close tags for
+ # html5lib/bs4 creates close tags for
assembled_text = assembled_text.replace(b'',b'')
assembled_text = assembled_text.replace(b'',b'')
From ff5e27a89ce18feef6ce2818ffd34ab07cb483de Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Fri, 10 Aug 2018 10:55:11 -0500
Subject: [PATCH 118/120] MOBI Debug output
---
fanficfare/mobi.py | 21 ++++++++++++++++++---
fanficfare/mobihtml.py | 2 ++
2 files changed, 20 insertions(+), 3 deletions(-)
diff --git a/fanficfare/mobi.py b/fanficfare/mobi.py
index 406d6180..2f59c72c 100644
--- a/fanficfare/mobi.py
+++ b/fanficfare/mobi.py
@@ -127,6 +127,11 @@ class Converter:
''' % time.ctime(time.time())
footer = ''
+ # logger.debug("header:%s"%header)
+ # logger.debug("title_html:%s"%title_html)
+ # logger.debug("toc_html:%s"%toc_html)
+ # logger.debug("body_html:%s"%body_html)
+ # logger.debug("footer:%s"%footer)
all_html = header + '\n'.join(title_html + toc_html + body_html) + footer
#print "%s" % all_html.encode('utf8')
return all_html
@@ -138,7 +143,7 @@ class Converter:
except Exception as e:
raise
logger.error('Error %s', e)
- #logger.debug('Details: %s' % html_strs)
+ # logger.debug('Details: %s' % html_strs)
def _ConvertStringToFile(self, html_data, out):
html = HtmlProcessor(html_data)
@@ -163,11 +168,12 @@ class Converter:
# if title:
# self._header.SetTitle(title)
record_id = 1
+ # logger.debug("len(data):%s"%len(data))
for start_pos in range(0, len(data), Record.MAX_SIZE):
end = min(len(data), start_pos + Record.MAX_SIZE)
record_data = data[start_pos:end]
records.append(self._header.AddRecord(record_data, record_id))
- #print "HTML Record %03d: (size:%d) [[%s ... %s]]" % ( record_id, len(record_data), record_data[:20], record_data[-20:] )
+ # logger.debug("HTML Record %03d: (size:%d) [[%s ... %s]]" % ( record_id, len(record_data), record_data[:20], record_data[-20:] ))
record_id += 1
self._header.SetImageRecordIndex(record_id)
records[0:0] = [self._header.MobiHeader()]
@@ -176,7 +182,7 @@ class Converter:
out.write(ensure_binary(header))
for record in records:
record.WriteHeader(out, rec_offset)
- #print "rec_offset: %d len(record.data): %d" % (rec_offset,len(record.data))
+ # logger.debug("rec_offset: %d len(record.data): %d" % (rec_offset,len(record.data)))
rec_offset += (len(record.data)+1) # plus one for trailing null
# Write to nuls for some reason
@@ -256,6 +262,7 @@ class Header:
def AddRecord(self, data, record_id):
self.max_record_size = max(Record.MAX_SIZE, len(data))
self._record_count += 1
+ # logger.debug("len(data):%s"%len(data))
self._length += len(data)
return Record(data, record_id)
@@ -279,12 +286,15 @@ class Header:
return palmdoc_header
def PDBHeader(self, num_records):
+ # logger.debug("num_records:%s"%num_records)
HEADER_LEN = 32+2+2+9*4
RECORD_INDEX_HEADER_LEN = 6
RESOURCE_INDEX_LEN = 10
index_len = RECORD_INDEX_HEADER_LEN + num_records * Record.INDEX_LEN
rec_offset = HEADER_LEN + index_len + 2
+ # logger.debug("index_len:%s"%index_len)
+ # logger.debug("rec_offset:%s"%rec_offset)
short_title = self._title[0:31]
attributes = 0
@@ -321,10 +331,12 @@ class Header:
length_encoding_len = 8
r.append(struct.pack('>LL', typeid, len(value) + length_encoding_len,) + value)
content = b''.join(r)
+ # logger.debug("len(content):%s"%len(content))
# Pad to word boundary
while len(content) % 4:
content += b'\0'
+ # logger.debug("len(content):%s"%len(content))
TODO_mysterious = 12
exth = b'EXTH' + struct.pack('>LL', len(content) + TODO_mysterious, len(data)) + content
return exth
@@ -346,6 +358,9 @@ class Header:
creator_version = 4
reserved = b'%c' % 0xff * 40
nonbook_index = fs
+ # logger.debug("header_len:%s"%header_len)
+ # logger.debug("len(palmdoc_header):%s"%len(palmdoc_header))
+ # logger.debug("len(exth_header):%s"%len(exth_header))
full_name_offset = header_len + len(palmdoc_header) + len(exth_header) # put full name after header
language = languages['en-us']
unused = 0
diff --git a/fanficfare/mobihtml.py b/fanficfare/mobihtml.py
index f9125795..8a3f6713 100644
--- a/fanficfare/mobihtml.py
+++ b/fanficfare/mobihtml.py
@@ -33,12 +33,14 @@ class HtmlProcessor:
self.unfill = unfill
# html = self._ProcessRawHtml(html)
self._soup = BeautifulSoup(html,'html5lib')
+ # logger.debug(html)
## mobi format wants to find this tag inside .
## html5lib, on the other hand, moved it to . So we'll move
## it back.
guide = self._soup.find('guide')
if guide:
self._soup.head.append(guide)
+ # logger.debug(self._soup)
if self._soup.title.contents:
self.title = self._soup.title.contents[0]
else:
From f7bf2f7d0af8f367f76884e09645385d2783cceb Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Fri, 10 Aug 2018 11:34:19 -0500
Subject: [PATCH 119/120] Bump Test Version 2.37.3
---
calibre-plugin/__init__.py | 2 +-
fanficfare/cli.py | 2 +-
setup.py | 2 +-
webservice/app.yaml | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/calibre-plugin/__init__.py b/calibre-plugin/__init__.py
index c4d992ef..941e45ad 100644
--- a/calibre-plugin/__init__.py
+++ b/calibre-plugin/__init__.py
@@ -33,7 +33,7 @@ except NameError:
from calibre.customize import InterfaceActionBase
# pulled out from FanFicFareBase for saving in prefs.py
-__version__ = (2, 37, 2)
+__version__ = (2, 37, 3)
## Apparently the name for this class doesn't matter--it was still
## 'demo' for the first few versions.
diff --git a/fanficfare/cli.py b/fanficfare/cli.py
index 2405445a..ea3f7153 100644
--- a/fanficfare/cli.py
+++ b/fanficfare/cli.py
@@ -39,7 +39,7 @@ else: # > 3.0
def pickle_load(f):
return pickle.load(f,encoding="bytes")
-version="2.37.2"
+version="2.37.3"
os.environ['CURRENT_VERSION_ID']=version
if sys.version_info >= (2, 7):
diff --git a/setup.py b/setup.py
index a0ddea92..ef03b3c5 100644
--- a/setup.py
+++ b/setup.py
@@ -27,7 +27,7 @@ setup(
name=package_name,
# Versions should comply with PEP440.
- version="2.37.2",
+ version="2.37.3",
description='A tool for downloading fanfiction to eBook formats',
long_description=long_description,
diff --git a/webservice/app.yaml b/webservice/app.yaml
index 7019ab90..0ec2f729 100644
--- a/webservice/app.yaml
+++ b/webservice/app.yaml
@@ -1,6 +1,6 @@
# ffd-retief-hrd fanficfare
application: fanficfare
-version: 2-37-2
+version: 2-37-3
runtime: python27
api_version: 1
threadsafe: true
From d50e6d084b02a277471e1f608a89c694dac6435e Mon Sep 17 00:00:00 2001
From: Jim Miller
Date: Fri, 10 Aug 2018 13:29:14 -0500
Subject: [PATCH 120/120] Put Nook STR Cover 'fix' back in.
---
fanficfare/writers/writer_epub.py | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/fanficfare/writers/writer_epub.py b/fanficfare/writers/writer_epub.py
index e76e4dc8..6907129c 100644
--- a/fanficfare/writers/writer_epub.py
+++ b/fanficfare/writers/writer_epub.py
@@ -26,6 +26,7 @@ import re
# py2 vs py3 transition
from ..six import text_type as unicode
from ..six import string_types as basestring
+from ..six import ensure_binary
from ..six import BytesIO # StringIO under py2
## XML isn't as forgiving as HTML, so rather than generate as strings,
@@ -580,10 +581,10 @@ div { margin: 0pt; padding: 0pt; }
# write content.opf to zip.
contentxml = contentdom.toxml(encoding='utf-8')
- # Causes py2 vs py3 issues with encoding nonsense. Skip for now.
# tweak for brain damaged Nook STR. Nook insists on name before content.
- # contentxml = contentxml.replace(''%coverimgid,
- # ''%coverimgid)
+ contentxml = contentxml.replace(ensure_binary(''%coverimgid),
+ ensure_binary(''%coverimgid))
+
outputepub.writestr("content.opf",contentxml)
contentdom.unlink()