Dual compatible cli.py.

This commit is contained in:
Jim Miller 2018-07-26 17:39:08 -05:00
parent 8627bee253
commit a2a0ff0bfd
3 changed files with 40 additions and 596 deletions

View file

@ -23,10 +23,8 @@ from six.moves.urllib.parse import urlparse
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
print(sys.path)
from .. import exceptions as exceptions from .. import exceptions as exceptions
## must import each adapter here. ## must import each adapter here.
from . import adapter_test1 from . import adapter_test1

View file

@ -19,8 +19,8 @@ from optparse import OptionParser, SUPPRESS_HELP
from os.path import expanduser, join, dirname from os.path import expanduser, join, dirname
from os import access, R_OK from os import access, R_OK
from subprocess import call from subprocess import call
from StringIO import StringIO from six import StringIO
import ConfigParser from six.moves import configparser
import getpass import getpass
import logging import logging
import pprint import pprint
@ -28,14 +28,13 @@ import string
import os, sys import os, sys
import pickle import pickle
import cookielib as cl from six.moves import http_cookiejar as cl
version="2.28.0" version="2.28.0"
os.environ['CURRENT_VERSION_ID']=version os.environ['CURRENT_VERSION_ID']=version
print("Python Version:%s"%sys.version) if sys.version_info < (2, 5):
if sys.version_info < (2, 5) or sys.version_info > (3,0): print('This program requires Python 2.5 or newer.')
print('This program requires Python 2.5 or newer. Python 3 is not supported.')
sys.exit(1) sys.exit(1)
if sys.version_info >= (2, 7): if sys.version_info >= (2, 7):
@ -73,6 +72,7 @@ def main(argv=None,
parser=None, parser=None,
passed_defaultsini=None, passed_defaultsini=None,
passed_personalini=None): passed_personalini=None):
logger.debug("Python Version:%s"%sys.version)
if argv is None: if argv is None:
argv = sys.argv[1:] argv = sys.argv[1:]
# read in args, anything starting with -- will be treated as --<varible>=<value> # read in args, anything starting with -- will be treated as --<varible>=<value>
@ -180,9 +180,9 @@ def main(argv=None,
if options.siteslist: if options.siteslist:
for site, examples in adapters.getSiteExamples(): for site, examples in adapters.getSiteExamples():
print '\n#### %s\nExample URLs:' % site print('\n#### %s\nExample URLs:' % site)
for u in examples: for u in examples:
print ' * %s' % u print(' * %s' % u)
return return
if options.update and options.format != 'epub': if options.update and options.format != 'epub':
@ -204,14 +204,14 @@ def main(argv=None,
passed_defaultsini, passed_defaultsini,
passed_personalini,options) passed_personalini,options)
retlist = get_urls_from_page(options.list, configuration) retlist = get_urls_from_page(options.list, configuration)
print '\n'.join(retlist) print('\n'.join(retlist))
if options.normalize: if options.normalize:
configuration = get_configuration(options.normalize, configuration = get_configuration(options.normalize,
passed_defaultsini, passed_defaultsini,
passed_personalini,options) passed_personalini,options)
retlist = get_urls_from_page(options.normalize, configuration,normalize=True) retlist = get_urls_from_page(options.normalize, configuration,normalize=True)
print '\n'.join(retlist) print('\n'.join(retlist))
if options.downloadlist: if options.downloadlist:
configuration = get_configuration(options.downloadlist, configuration = get_configuration(options.downloadlist,
@ -234,18 +234,18 @@ def main(argv=None,
if options.downloadimap: if options.downloadimap:
urls.extend(retlist) urls.extend(retlist)
else: else:
print '\n'.join(retlist) print('\n'.join(retlist))
# for passing in a file list # for passing in a file list
if options.infile: if options.infile:
with open(options.infile,"r") as infile: with open(options.infile,"r") as infile:
#print "File exists and is readable" #print("file exists and is readable")
for url in infile: for url in infile:
if '#' in url: if '#' in url:
url = url[:url.find('#')].strip() url = url[:url.find('#')].strip()
url = url.strip() url = url.strip()
if len(url) > 0: if len(url) > 0:
#print "URL: (%s)"%url #print("url: (%s)"%url)
urls.append(url) urls.append(url)
if options.save_cache: if options.save_cache:
@ -255,11 +255,11 @@ def main(argv=None,
options.cookiejar = cl.LWPCookieJar() options.cookiejar = cl.LWPCookieJar()
options.cookiejar.load('global_cookies') options.cookiejar.load('global_cookies')
except: except:
print("Didn't load global_cache") print("didn't load global_cache")
if not list_only: if not list_only:
if len(urls) < 1: if len(urls) < 1:
print "No valid story URLs found" print("No valid story URLs found")
else: else:
for url in urls: for url in urls:
try: try:
@ -268,10 +268,10 @@ def main(argv=None,
passed_defaultsini, passed_defaultsini,
passed_personalini) passed_personalini)
#print("pagecache:%s"%options.pagecache.keys()) #print("pagecache:%s"%options.pagecache.keys())
except Exception, e: except Exception as e:
if len(urls) == 1: if len(urls) == 1:
raise raise
print "URL(%s) Failed: Exception (%s). Run URL individually for more detail."%(url,e) print("URL(%s) Failed: Exception (%s). Run URL individually for more detail."%(url,e))
if options.save_cache: if options.save_cache:
with open('global_cache','wb') as jout: with open('global_cache','wb') as jout:
@ -297,9 +297,9 @@ def do_download(arg,
try: try:
url, chaptercount = get_dcsource_chaptercount(arg) url, chaptercount = get_dcsource_chaptercount(arg)
if not url: if not url:
print 'No story URL found in epub to update.' print('No story URL found in epub to update.')
return return
print 'Updating %s, URL: %s' % (arg, url) print('Updating %s, URL: %s' % (arg, url))
output_filename = arg output_filename = arg
except Exception: except Exception:
# if there's an error reading the update file, maybe it's a URL? # if there's an error reading the update file, maybe it's a URL?
@ -343,7 +343,7 @@ def do_download(arg,
writer = writers.getWriter('epub', configuration, adapter) writer = writers.getWriter('epub', configuration, adapter)
output_filename = writer.getOutputFileName() output_filename = writer.getOutputFileName()
noturl, chaptercount = get_dcsource_chaptercount(output_filename) noturl, chaptercount = get_dcsource_chaptercount(output_filename)
print 'Updating %s, URL: %s' % (output_filename, url) print('Updating %s, URL: %s' % (output_filename, url))
except Exception: except Exception:
options.update = False options.update = False
pass pass
@ -360,38 +360,38 @@ def do_download(arg,
try: try:
import Image import Image
except ImportError: except ImportError:
print "You have include_images enabled, but Python Image Library(PIL) isn't found.\nImages will be included full size in original format.\nContinue? (y/n)?" print("You have include_images enabled, but Python Image Library(PIL) isn't found.\nImages will be included full size in original format.\nContinue? (y/n)?")
if options.interactive: if options.interactive:
if not sys.stdin.readline().strip().lower().startswith('y'): if not sys.stdin.readline().strip().lower().startswith('y'):
return return
else: else:
# for non-interactive, default the response to yes and continue processing # for non-interactive, default the response to yes and continue processing
print 'y' print('y')
# three tries, that's enough if both user/pass & is_adult needed, # three tries, that's enough if both user/pass & is_adult needed,
# or a couple tries of one or the other # or a couple tries of one or the other
for x in range(0, 2): for x in range(0, 2):
try: try:
adapter.getStoryMetadataOnly() adapter.getStoryMetadataOnly()
except exceptions.FailedToLogin, f: except exceptions.FailedToLogin as f:
if not options.interactive: if not options.interactive:
print 'Login Failed on non-interactive process. Set username and password in personal.ini.' print('Login Failed on non-interactive process. Set username and password in personal.ini.')
return return
if f.passwdonly: if f.passwdonly:
print 'Story requires a password.' print('Story requires a password.')
else: else:
print 'Login Failed, Need Username/Password.' print('Login Failed, Need Username/Password.')
sys.stdout.write('Username: ') sys.stdout.write('Username: ')
adapter.username = sys.stdin.readline().strip() adapter.username = sys.stdin.readline().strip()
adapter.password = getpass.getpass(prompt='Password: ') adapter.password = getpass.getpass(prompt='Password: ')
# print('Login: `%s`, Password: `%s`' % (adapter.username, adapter.password)) # print('Login: `%s`, Password: `%s`' % (adapter.username, adapter.password))
except exceptions.AdultCheckRequired: except exceptions.AdultCheckRequired:
if options.interactive: if options.interactive:
print 'Please confirm you are an adult in your locale: (y/n)?' print('Please confirm you are an adult in your locale: (y/n)?')
if sys.stdin.readline().strip().lower().startswith('y'): if sys.stdin.readline().strip().lower().startswith('y'):
adapter.is_adult = True adapter.is_adult = True
else: else:
print 'Adult check required on non-interactive process. Set is_adult:true in personal.ini or pass -o "is_adult=true" to the command.' print('Adult check required on non-interactive process. Set is_adult:true in personal.ini or pass -o "is_adult=true" to the command.')
return return
if options.update and not options.force: if options.update and not options.force:
@ -400,11 +400,11 @@ def do_download(arg,
urlchaptercount = adapter.getStoryMetadataOnly().getChapterCount() urlchaptercount = adapter.getStoryMetadataOnly().getChapterCount()
if chaptercount == urlchaptercount and not options.metaonly: if chaptercount == urlchaptercount and not options.metaonly:
print '%s already contains %d chapters.' % (output_filename, chaptercount) print('%s already contains %d chapters.' % (output_filename, chaptercount))
elif chaptercount > urlchaptercount: elif chaptercount > urlchaptercount:
print '%s contains %d chapters, more than source: %d.' % (output_filename, chaptercount, urlchaptercount) print('%s contains %d chapters, more than source: %d.' % (output_filename, chaptercount, urlchaptercount))
elif chaptercount == 0: elif chaptercount == 0:
print "%s doesn't contain any recognizable chapters, probably from a different source. Not updating." % output_filename print("%s doesn't contain any recognizable chapters, probably from a different source. Not updating." % output_filename)
else: else:
# update now handled by pre-populating the old # update now handled by pre-populating the old
# images and chapters in the adapter rather than # images and chapters in the adapter rather than
@ -419,7 +419,7 @@ def do_download(arg,
adapter.oldchaptersmap, adapter.oldchaptersmap,
adapter.oldchaptersdata) = (get_update_data(output_filename))[0:9] adapter.oldchaptersdata) = (get_update_data(output_filename))[0:9]
print 'Do update - epub(%d) vs url(%d)' % (chaptercount, urlchaptercount) print('Do update - epub(%d) vs url(%d)' % (chaptercount, urlchaptercount))
if not options.update and chaptercount == urlchaptercount and adapter.getConfig('do_update_hook'): if not options.update and chaptercount == urlchaptercount and adapter.getConfig('do_update_hook'):
adapter.hookForUpdates(chaptercount) adapter.hookForUpdates(chaptercount)
@ -453,8 +453,8 @@ def do_download(arg,
metadata['output_filename'] = output_filename metadata['output_filename'] = output_filename
if options.jsonmeta: if options.jsonmeta:
import json import json
print json.dumps(metadata, sort_keys=True, print(json.dumps(metadata, sort_keys=True,
indent=2, separators=(',', ':')) indent=2, separators=(',', ':')))
else: else:
pprint.pprint(metadata) pprint.pprint(metadata)
@ -469,13 +469,13 @@ def do_download(arg,
del adapter del adapter
except exceptions.InvalidStoryURL as isu: except exceptions.InvalidStoryURL as isu:
print isu print(isu)
except exceptions.StoryDoesNotExist as dne: except exceptions.StoryDoesNotExist as dne:
print dne print(dne)
except exceptions.UnknownSite as us: except exceptions.UnknownSite as us:
print us print(us)
except exceptions.AccessDenied as ad: except exceptions.AccessDenied as ad:
print ad print(ad)
def get_configuration(url, def get_configuration(url,
passed_defaultsini, passed_defaultsini,
@ -485,7 +485,7 @@ def get_configuration(url,
output_filename=None): output_filename=None):
try: try:
configuration = Configuration(adapters.getConfigSectionsFor(url), options.format) configuration = Configuration(adapters.getConfigSectionsFor(url), options.format)
except exceptions.UnknownSite, e: except exceptions.UnknownSite as e:
if options.list or options.normalize or options.downloadlist: if options.list or options.normalize or options.downloadlist:
# list for page doesn't have to be a supported site. # list for page doesn't have to be a supported site.
configuration = Configuration(['unknown'], options.format) configuration = Configuration(['unknown'], options.format)
@ -524,7 +524,7 @@ def get_configuration(url,
try: try:
configuration.add_section('overrides') configuration.add_section('overrides')
except ConfigParser.DuplicateSectionError: except configparser.DuplicateSectionError:
pass pass
if options.force: if options.force:

View file

@ -1,554 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright 2015 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from optparse import OptionParser, SUPPRESS_HELP
from os.path import expanduser, join, dirname
from os import access, R_OK
from subprocess import call
from six import StringIO
from six.moves import configparser
import getpass
import logging
import pprint
import string
import sys
import pickle
import http.cookiejar as cl
version="2.27.8"
print("Python Version:%s"%sys.version)
if sys.version_info < (3,0):
print('this program requires python 3 or newer.')
sys.exit(1)
# if sys.version_info >= (2, 7):
# # suppresses default logger. logging is setup in fanficfare/__init__.py so it works in calibre, too.
# rootlogger = logging.getlogger()
# loghandler = logging.nullhandler()
# loghandler.setformatter(logging.formatter('(=====)(levelname)s:%(message)s'))
# rootlogger.addhandler(loghandler)
logger = logging.getLogger('fanficfare')
try:
# running under calibre
from calibre_plugins.fanficfare_plugin.fanficfare import adapters, writers, exceptions
from calibre_plugins.fanficfare_plugin.fanficfare.configurable import Configuration
from calibre_plugins.fanficfare_plugin.fanficfare.epubutils import (
get_dcsource_chaptercount, get_update_data, reset_orig_chapters_epub)
from calibre_plugins.fanficfare_plugin.fanficfare.geturls import get_urls_from_page, get_urls_from_imap
except ImportError:
from fanficfare import adapters, writers, exceptions
from fanficfare.configurable import Configuration
from fanficfare.epubutils import (
get_dcsource_chaptercount, get_update_data, reset_orig_chapters_epub)
from fanficfare.geturls import get_urls_from_page, get_urls_from_imap
def write_story(config, adapter, writeformat, metaonly=False, outstream=None):
    """Serialize the story held by *adapter* using a writer for *writeformat*.

    Returns the writer's computed output file name (determined even when
    writing to *outstream* or when only metadata is written).
    """
    story_writer = writers.getWriter(writeformat, config, adapter)
    try:
        story_writer.writeStory(outstream=outstream, metaonly=metaonly)
        return story_writer.getOutputFileName()
    finally:
        # Drop the writer promptly, as the original implementation did.
        del story_writer
def main(argv=None,
         parser=None,
         passed_defaultsini=None,
         passed_personalini=None):
    """Command-line entry point.

    Parses options, collects story URLs (from the command line, --infile,
    --list/--download-list pages, or IMAP) and hands each URL to
    do_download().

    :param argv: argument list; defaults to sys.argv[1:].
    :param parser: optional pre-seeded OptionParser (used by wrappers that
        add their own options).
    :param passed_defaultsini: defaults.ini contents as a string, or None.
    :param passed_personalini: personal.ini contents as a string, or None.
    """
    if argv is None:
        argv = sys.argv[1:]
    # read in args, anything starting with -- will be treated as --<varible>=<value>
    if not parser:
        parser = OptionParser('usage: %prog [options] [STORYURL]...')
    parser.add_option('-f', '--format', dest='format', default='epub',
                      help='write story as FORMAT, epub(default), mobi, txt or html', metavar='FORMAT')
    if passed_defaultsini:
        config_help = 'read config from specified file(s) in addition to calibre plugin personal.ini, ~/.fanficfare/personal.ini, and ./personal.ini'
    else:
        config_help = 'read config from specified file(s) in addition to ~/.fanficfare/defaults.ini, ~/.fanficfare/personal.ini, ./defaults.ini, and ./personal.ini'
    parser.add_option('-c', '--config',
                      action='append', dest='configfile', default=None,
                      help=config_help, metavar='CONFIG')
    range_help = ' --begin and --end will be overridden by a chapter range on the STORYURL like STORYURL[1-2], STORYURL[-3], STORYURL[3-] or STORYURL[3]'
    parser.add_option('-b', '--begin', dest='begin', default=None,
                      help='Begin with Chapter START.'+range_help, metavar='START')
    parser.add_option('-e', '--end', dest='end', default=None,
                      help='End with Chapter END.'+range_help, metavar='END')
    parser.add_option('-o', '--option',
                      action='append', dest='options',
                      help='set an option NAME=VALUE', metavar='NAME=VALUE')
    parser.add_option('-m', '--meta-only',
                      action='store_true', dest='metaonly',
                      help='Retrieve metadata and stop. Or, if --update-epub, update metadata title page only.', )
    parser.add_option('--json-meta',
                      action='store_true', dest='jsonmeta',
                      help='When used with --meta-only, output metadata as JSON. No effect without --meta-only flag', )
    parser.add_option('-u', '--update-epub',
                      action='store_true', dest='update',
                      help='Update an existing epub(if present) with new chapters. Give either epub filename or story URL.', )
    parser.add_option('--update-cover',
                      action='store_true', dest='updatecover',
                      help='Update cover in an existing epub, otherwise existing cover (if any) is used on update. Only valid with --update-epub.', )
    parser.add_option('--unnew',
                      action='store_true', dest='unnew',
                      help='Remove (new) chapter marks left by mark_new_chapters setting.', )
    parser.add_option('--force',
                      action='store_true', dest='force',
                      help='Force overwrite of an existing epub, download and overwrite all chapters.', )
    parser.add_option('-i', '--infile',
                      help='Give a filename to read for URLs (and/or existing EPUB files with --update-epub).',
                      dest='infile', default=None,
                      metavar='INFILE')
    parser.add_option('-l', '--list',
                      dest='list', default=None, metavar='URL',
                      help='Get list of valid story URLs from page given.', )
    parser.add_option('-n', '--normalize-list',
                      dest='normalize', default=None, metavar='URL',
                      help='Get list of valid story URLs from page given, but normalized to standard forms.', )
    parser.add_option('--download-list',
                      dest='downloadlist', default=None, metavar='URL',
                      help='Download story URLs retrieved from page given. Update existing EPUBs if used with --update-epub.', )
    parser.add_option('--imap',
                      action='store_true', dest='imaplist',
                      help='Get list of valid story URLs from unread email from IMAP account configured in ini.', )
    parser.add_option('--download-imap',
                      action='store_true', dest='downloadimap',
                      help='Download valid story URLs from unread email from IMAP account configured in ini. Update existing EPUBs if used with --update-epub.', )
    parser.add_option('-s', '--sites-list',
                      action='store_true', dest='siteslist', default=False,
                      help='Get list of valid story URLs examples.', )
    parser.add_option('--non-interactive',
                      action='store_false', dest='interactive', default=sys.stdin.isatty() and sys.stdout.isatty(),
                      help='Prevent interactive prompts (for scripting).', )
    parser.add_option('-d', '--debug',
                      action='store_true', dest='debug',
                      help='Show debug and notice output.', )
    parser.add_option('-p', '--progressbar',
                      action='store_true', dest='progressbar',
                      help='Display a simple progress bar while downloading--one dot(.) per network fetch.', )
    parser.add_option('-v', '--version',
                      action='store_true', dest='version',
                      help='Display version and quit.', )
    ## undocumented feature for development use. Save page cache and
    ## cookies between runs. Saves in PWD as files global_cache and
    ## global_cookies
    parser.add_option('--save-cache', '--save_cache',
                      action='store_true', dest='save_cache',
                      help=SUPPRESS_HELP, )

    options, args = parser.parse_args(argv)

    if options.version:
        print("Version: %s" % version)
        return

    if not options.debug:
        logger.setLevel(logging.WARNING)

    # "list" modes print URLs and exit; they are mutually exclusive with
    # download modes.
    list_only = any((options.imaplist,
                     options.siteslist,
                     options.list,
                     options.normalize,
                     ))

    if list_only and (args or any((options.downloadimap,
                                   options.downloadlist))):
        parser.error('Incorrect arguments: Cannot download and list URLs at the same time.')

    if options.siteslist:
        for site, examples in adapters.getSiteExamples():
            print('\n#### %s\nExample URLs:' % site)
            for u in examples:
                print(' * %s' % u)
        return

    if options.update and options.format != 'epub':
        parser.error('-u/--update-epub only works with epub')

    if options.unnew and options.format != 'epub':
        parser.error('--unnew only works with epub')

    # Positional args are story URLs; more may be appended below.
    urls=args

    if not list_only and not (args or any((options.infile,
                                           options.downloadimap,
                                           options.downloadlist))):
        parser.print_help()
        return

    if options.list:
        configuration = get_configuration(options.list,
                                          passed_defaultsini,
                                          passed_personalini,options)
        retlist = get_urls_from_page(options.list, configuration)
        print('\n'.join(retlist))

    if options.normalize:
        configuration = get_configuration(options.normalize,
                                          passed_defaultsini,
                                          passed_personalini,options)
        retlist = get_urls_from_page(options.normalize, configuration,normalize=True)
        print('\n'.join(retlist))

    if options.downloadlist:
        configuration = get_configuration(options.downloadlist,
                                          passed_defaultsini,
                                          passed_personalini,options)
        retlist = get_urls_from_page(options.downloadlist, configuration)
        urls.extend(retlist)

    if options.imaplist or options.downloadimap:
        # list doesn't have a supported site.
        configuration = get_configuration('test1.com',passed_defaultsini,passed_personalini,options)
        markread = configuration.getConfig('imap_mark_read') == 'true' or \
            (configuration.getConfig('imap_mark_read') == 'downloadonly' and options.downloadimap)
        retlist = get_urls_from_imap(configuration.getConfig('imap_server'),
                                     configuration.getConfig('imap_username'),
                                     configuration.getConfig('imap_password'),
                                     configuration.getConfig('imap_folder'),
                                     markread)
        if options.downloadimap:
            urls.extend(retlist)
        else:
            print('\n'.join(retlist))

    # for passing in a file list
    if options.infile:
        with open(options.infile,"r") as infile:
            #print("file exists and is readable")
            for url in infile:
                # '#' starts a comment; strip it and surrounding whitespace.
                if '#' in url:
                    url = url[:url.find('#')].strip()
                url = url.strip()
                if len(url) > 0:
                    #print("url: (%s)"%url)
                    urls.append(url)

    if options.save_cache:
        try:
            with open('global_cache','rb') as jin:
                options.pagecache = pickle.load(jin) # ,encoding="utf-8"
            options.cookiejar = cl.LWPCookieJar()
            options.cookiejar.load('global_cookies')
        except Exception:
            # FIX: narrowed from a bare "except:" so KeyboardInterrupt and
            # SystemExit still propagate; cache loading stays best-effort.
            print("didn't load global_cache")

    if not list_only:
        if len(urls) < 1:
            print("No valid story URLs found")
        else:
            for url in urls:
                try:
                    do_download(url,
                                options,
                                passed_defaultsini,
                                passed_personalini)
                    #print("pagecache:%s"%options.pagecache.keys())
                except Exception as e:
                    # With a single URL, let the traceback through for detail.
                    if len(urls) == 1:
                        raise
                    print("URL(%s) Failed: Exception (%s). Run URL individually for more detail."%(url,e))

    if options.save_cache:
        with open('global_cache','wb') as jout:
            pickle.dump(options.pagecache,jout)
        options.cookiejar.save('global_cookies')
# make rest a function and loop on it.
def do_download(arg,
                options,
                passed_defaultsini,
                passed_personalini):
    """Download (or update) one story.

    :param arg: a story URL, or an epub filename with --update-epub/--unnew.
    :param options: parsed optparse options from main().
    :param passed_defaultsini: defaults.ini contents as a string, or None.
    :param passed_personalini: personal.ini contents as a string, or None.
    """
    # Attempt to update an existing epub.
    chaptercount = None
    output_filename = None

    if options.unnew:
        # remove mark_new_chapters marks
        reset_orig_chapters_epub(arg,arg)
        return

    if options.update:
        try:
            url, chaptercount = get_dcsource_chaptercount(arg)
            if not url:
                print('No story URL found in epub to update.')
                return
            print('Updating %s, URL: %s' % (arg, url))
            output_filename = arg
        except Exception:
            # if there's an error reading the update file, maybe it's a URL?
            # we'll look for an existing outputfile down below.
            url = arg
    else:
        url = arg

    configuration = get_configuration(url,
                                      passed_defaultsini,
                                      passed_personalini,
                                      options,
                                      chaptercount,
                                      output_filename)

    try:
        # Allow chapter range with URL.
        # like test1.com?sid=5[4-6] or [4,6]
        # Overrides CLI options if present.
        url,ch_begin,ch_end = adapters.get_url_chapter_range(url)

        adapter = adapters.getAdapter(configuration, url)

        ## Share pagecache and cookiejar between multiple downloads.
        if not hasattr(options,'pagecache'):
            options.pagecache = configuration.get_empty_pagecache()
        if not hasattr(options,'cookiejar'):
            options.cookiejar = configuration.get_empty_cookiejar()
        configuration.set_pagecache(options.pagecache)
        configuration.set_cookiejar(options.cookiejar)

        # url[begin-end] overrides CLI option if present.
        if ch_begin or ch_end:
            adapter.setChaptersRange(ch_begin, ch_end)
        else:
            adapter.setChaptersRange(options.begin, options.end)

        # check for updating from URL (vs from file)
        if options.update and not chaptercount:
            try:
                writer = writers.getWriter('epub', configuration, adapter)
                output_filename = writer.getOutputFileName()
                noturl, chaptercount = get_dcsource_chaptercount(output_filename)
                print('Updating %s, URL: %s' % (output_filename, url))
            except Exception:
                # No usable existing epub found: fall back to full download.
                options.update = False
                pass

        # Check for include_images without no_image_processing. In absence of PIL, give warning.
        if adapter.getConfig('include_images') and not adapter.getConfig('no_image_processing'):
            # Probe for an image library in preference order:
            # calibre's bundled magick, then Pillow, then classic PIL.
            try:
                from calibre.utils.magick import Image
            except ImportError:
                try:
                    ## Pillow is a more current fork of PIL library
                    from PIL import Image
                except ImportError:
                    try:
                        import Image
                    except ImportError:
                        print("You have include_images enabled, but Python Image Library(PIL) isn't found.\nImages will be included full size in original format.\nContinue? (y/n)?")
                        if options.interactive:
                            if not sys.stdin.readline().strip().lower().startswith('y'):
                                return
                        else:
                            # for non-interactive, default the response to yes and continue processing
                            print('y')

        # three tries, that's enough if both user/pass & is_adult needed,
        # or a couple tries of one or the other
        for x in range(0, 2):
            try:
                adapter.getStoryMetadataOnly()
            except exceptions.FailedToLogin as f:
                if not options.interactive:
                    print('Login Failed on non-interactive process. Set username and password in personal.ini.')
                    return
                if f.passwdonly:
                    print('Story requires a password.')
                else:
                    print('Login Failed, Need Username/Password.')
                    sys.stdout.write('Username: ')
                    adapter.username = sys.stdin.readline().strip()
                adapter.password = getpass.getpass(prompt='Password: ')
                # print('Login: `%s`, Password: `%s`' % (adapter.username, adapter.password))
            except exceptions.AdultCheckRequired:
                if options.interactive:
                    print('Please confirm you are an adult in your locale: (y/n)?')
                    if sys.stdin.readline().strip().lower().startswith('y'):
                        adapter.is_adult = True
                else:
                    print('Adult check required on non-interactive process. Set is_adult:true in personal.ini or pass -o "is_adult=true" to the command.')
                    return

        if options.update and not options.force:
            urlchaptercount = int(adapter.getStoryMetadataOnly().getMetadata('numChapters').replace(',',''))
            # returns int adjusted for start-end range.
            urlchaptercount = adapter.getStoryMetadataOnly().getChapterCount()

            if chaptercount == urlchaptercount and not options.metaonly:
                print('%s already contains %d chapters.' % (output_filename, chaptercount))
            elif chaptercount > urlchaptercount:
                print('%s contains %d chapters, more than source: %d.' % (output_filename, chaptercount, urlchaptercount))
            elif chaptercount == 0:
                print("%s doesn't contain any recognizable chapters, probably from a different source. Not updating." % output_filename)
            else:
                # update now handled by pre-populating the old
                # images and chapters in the adapter rather than
                # merging epubs.
                (url,
                 chaptercount,
                 adapter.oldchapters,
                 adapter.oldimgs,
                 adapter.oldcover,
                 adapter.calibrebookmark,
                 adapter.logfile,
                 adapter.oldchaptersmap,
                 adapter.oldchaptersdata) = (get_update_data(output_filename))[0:9]

                print('Do update - epub(%d) vs url(%d)' % (chaptercount, urlchaptercount))

                # NOTE(review): within this branch options.update is always
                # true, so this guard looks unreachable — confirm intended
                # nesting against the upstream history.
                if not options.update and chaptercount == urlchaptercount and adapter.getConfig('do_update_hook'):
                    adapter.hookForUpdates(chaptercount)

                if adapter.getConfig('pre_process_safepattern'):
                    metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getConfig('pre_process_safepattern'))
                else:
                    metadata = adapter.story.getAllMetadata()
                call(string.Template(adapter.getConfig('pre_process_cmd')).substitute(metadata), shell=True)

                write_story(configuration, adapter, 'epub')
        else:
            # regular download
            if options.metaonly:
                metadata = adapter.getStoryMetadataOnly().getAllMetadata()
                metadata['zchapters'] = []
                for i, chap in enumerate(adapter.get_chapters()):
                    metadata['zchapters'].append((i+1,chap))

            if not options.metaonly and adapter.getConfig('pre_process_cmd'):
                if adapter.getConfig('pre_process_safepattern'):
                    metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getConfig('pre_process_safepattern'))
                else:
                    metadata = adapter.story.getAllMetadata()
                call(string.Template(adapter.getConfig('pre_process_cmd')).substitute(metadata), shell=True)

            output_filename = write_story(configuration, adapter, options.format, options.metaonly)

            if options.metaonly:
                metadata['output_filename'] = output_filename
                if options.jsonmeta:
                    import json
                    print(json.dumps(metadata, sort_keys=True,
                                     indent=2, separators=(',', ':')))
                else:
                    pprint.pprint(metadata)

        if not options.metaonly and adapter.getConfig('post_process_cmd'):
            if adapter.getConfig('post_process_safepattern'):
                metadata = adapter.story.get_filename_safe_metadata(pattern=adapter.getConfig('post_process_safepattern'))
            else:
                metadata = adapter.story.getAllMetadata()
            metadata['output_filename'] = output_filename
            call(string.Template(adapter.getConfig('post_process_cmd')).substitute(metadata), shell=True)

        del adapter
    # Known, user-meaningful failures are reported without a traceback.
    except exceptions.InvalidStoryURL as isu:
        print(isu)
    except exceptions.StoryDoesNotExist as dne:
        print(dne)
    except exceptions.UnknownSite as us:
        print(us)
    except exceptions.AccessDenied as ad:
        print(ad)
def get_configuration(url,
                      passed_defaultsini,
                      passed_personalini,
                      options,
                      chaptercount=None,
                      output_filename=None):
    """Build a Configuration for *url*, layering ini files and CLI overrides.

    :param url: story URL used to pick the site config sections.
    :param passed_defaultsini: defaults.ini contents as a string, or None
        (then on-disk defaults.ini locations are used).
    :param passed_personalini: personal.ini contents as a string, or None.
    :param options: parsed optparse options from main().
    :param chaptercount: chapter count of the existing epub when updating.
    :param output_filename: existing epub filename when updating.
    :raises exceptions.UnknownSite: for unsupported sites, except in
        list/normalize/download-list modes where 'unknown' config is used.
    :return: populated Configuration with an 'overrides' section applied.
    """
    try:
        configuration = Configuration(adapters.getConfigSectionsFor(url), options.format)
    except exceptions.UnknownSite as e:
        if options.list or options.normalize or options.downloadlist:
            # list for page doesn't have to be a supported site.
            configuration = Configuration(['unknown'], options.format)
        else:
            raise e

    conflist = []
    homepath = join(expanduser('~'), '.fanficdownloader')
    ## also look for .fanficfare now, give higher priority than old dir.
    homepath2 = join(expanduser('~'), '.fanficfare')

    if passed_defaultsini:
        # new StringIO each time rather than pass StringIO and rewind
        # for case of list download. Just makes more sense to me.
        configuration.readfp(StringIO(passed_defaultsini))
    else:
        # don't need to check existance for our selves.
        conflist.append(join(dirname(__file__), 'defaults.ini'))
        conflist.append(join(homepath, 'defaults.ini'))
        conflist.append(join(homepath2, 'defaults.ini'))
        conflist.append('defaults.ini')

    if passed_personalini:
        # new StringIO each time rather than pass StringIO and rewind
        # for case of list download. Just makes more sense to me.
        configuration.readfp(StringIO(passed_personalini))

    # personal.ini locations are always read, even with passed_personalini.
    conflist.append(join(homepath, 'personal.ini'))
    conflist.append(join(homepath2, 'personal.ini'))
    conflist.append('personal.ini')

    if options.configfile:
        conflist.extend(options.configfile)

    configuration.read(conflist)

    try:
        configuration.add_section('overrides')
    except configparser.DuplicateSectionError:
        pass

    if options.force:
        configuration.set('overrides', 'always_overwrite', 'true')

    if options.update and chaptercount and output_filename:
        configuration.set('overrides', 'output_filename', output_filename)

    if options.update and not options.updatecover:
        configuration.set('overrides', 'never_make_cover', 'true')

    # images only for epub, even if the user mistakenly turned it
    # on else where.
    if options.format not in ('epub', 'html'):
        configuration.set('overrides', 'include_images', 'false')

    if options.options:
        for opt in options.options:
            # FIX: split only on the first '=' so option values may
            # themselves contain '=' (e.g. -o "name=a=b" -> ('name','a=b')).
            (var, val) = opt.split('=', 1)
            configuration.set('overrides', var, val)

    if options.progressbar:
        configuration.set('overrides','progressbar','true')

    return configuration
# Script entry point when run directly (python cli.py [options] URL...).
if __name__ == '__main__':
    main()