Working towards Python 2.7 & 3 cross compatibility.

This commit is contained in:
Jim Miller 2018-07-26 15:19:21 -05:00
parent 1a2392a8c8
commit cea3773e4f
3 changed files with 22 additions and 18 deletions

View file

@ -22,9 +22,11 @@ import codecs
import six.moves.configparser as ConfigParser
from six.moves.configparser import DEFAULTSECT, MissingSectionHeaderError, ParsingError
from six.moves import urllib
from six.moves import urllib as u2
from six.moves.urllib.parse import urlparse as up
from six.moves.urllib.request import (build_opener, HTTPCookieProcessor)
from six.moves.urllib import parse as up
from six.moves import http_cookiejar as cl
# unicode in py2, str in py3
from six import text_type as unicode
import time
import logging
@ -71,7 +73,8 @@ from gziphttp import GZipProcessor
logger = logging.getLogger(__name__)
# It's all the fault of David Beazley!
# Work around for fact that py3 apparently doesn't allow/ignore
# recursive imports like py2 does.
try:
from . import adapters
except ImportError:
@ -481,7 +484,7 @@ def make_generate_cover_settings(param):
for line in param.splitlines():
if "=>" in line:
try:
(template,regexp,setting) = map( lambda x: x.strip(), line.split("=>") )
(template,regexp,setting) = [ x.strip() for x in line.split("=>") ]
re_compile(regexp,line)
vlist.append((template,regexp,setting))
except Exception as e:
@ -537,7 +540,7 @@ class Configuration(ConfigParser.SafeConfigParser):
self.override_sleep = None
self.cookiejar = self.get_empty_cookiejar()
self.opener = u2.build_opener(u2.HTTPCookieProcessor(self.cookiejar),GZipProcessor())
self.opener = build_opener(HTTPCookieProcessor(self.cookiejar),GZipProcessor())
self.pagecache = self.get_empty_pagecache()
@ -550,9 +553,9 @@ class Configuration(ConfigParser.SafeConfigParser):
## reconstructed completely because removing and re-adding
## a section would mess up the order.
## assumes _dict and _sections from ConfigParser parent.
self._sections = self._dict((section_url_f(k) if (domain in k and 'http' in k) else k, v) for k, v in self._sections.viewitems())
self._sections = self._dict((section_url_f(k) if (domain in k and 'http' in k) else k, v) for k, v in six.viewitems(self._sections))
# logger.debug(self._sections.keys())
except e:
except Exception as e:
logger.warn("Failed to perform section_url_names: %s"%e)
def addUrlConfigSection(self,url):
@ -896,7 +899,7 @@ class Configuration(ConfigParser.SafeConfigParser):
def set_cookiejar(self,cj):
self.cookiejar = cj
saveheaders = self.opener.addheaders
self.opener = u2.build_opener(u2.HTTPCookieProcessor(self.cookiejar),GZipProcessor())
self.opener = build_opener(HTTPCookieProcessor(self.cookiejar),GZipProcessor())
self.opener.addheaders = saveheaders
def load_cookiejar(self,filename):
@ -1006,13 +1009,13 @@ class Configuration(ConfigParser.SafeConfigParser):
logger.debug("#####################################\npagecache(POST) MISS: %s"%safe_url(cachekey))
self.do_sleep(extrasleep)
## u2.Request assumes POST when data!=None. Also assumes data
## urllib.Request assumes POST when data!=None. Also assumes data
## is application/x-www-form-urlencoded.
if 'Content-type' not in headers:
headers['Content-type']='application/x-www-form-urlencoded'
if 'Accept' not in headers:
headers['Accept']="text/html,*/*"
req = u2.Request(url,
req = urllib.Request(url,
data=urllib.urlencode(parameters),
headers=headers)
@ -1122,7 +1125,7 @@ class Configuration(ConfigParser.SafeConfigParser):
extrasleep=extrasleep,
referer=referer)
return (self._decode(data),opened)
except u2.HTTPError as he:
except urllib.HTTPError as he:
excpt=he
if he.code in (403,404,410):
logger.debug("Caught an exception reading URL: %s Exception %s."%(unicode(safe_url(url)),unicode(he)))

View file

@ -20,6 +20,7 @@ import re
import os.path
import datetime
import string
import six
from six import StringIO
import zipfile
from zipfile import ZipFile, ZIP_DEFLATED
@ -65,8 +66,8 @@ class BaseStoryWriter(Configurable):
return self.story.formatFileName(self.getConfig('zip_filename'),self.getConfig('allow_unsafe_filename'))
def _write(self, out, text):
# instead of text.encode('utf8')
out.write(six.ensure_text(text))
# instead of six.ensure_text(text)
out.write(text.encode('utf8'))
def writeTitlePage(self, out, START, ENTRY, END, WIDE_ENTRY=None, NO_TITLE_ENTRY=None):
"""

View file

@ -303,7 +303,7 @@ div { margin: 0pt; padding: 0pt; }
## not on an open stream. OTOH, I suspect we would have had
## problems with closing and opening again to change the
## compression type anyway.
zipio = StringIO.StringIO()
zipio = StringIO()
## mimetype must be first file and uncompressed. Python 2.5
## ZipFile can't change compression type file-by-file, so we
@ -518,7 +518,7 @@ div { margin: 0pt; padding: 0pt; }
COVER = string.Template(self.getConfig("cover_content"))
else:
COVER = self.EPUB_COVER
coverIO = StringIO.StringIO()
coverIO = StringIO()
coverIO.write(COVER.substitute(dict(self.story.getAllMetadata().items()+{'coverimg':self.story.cover}.items())))
if self.getConfig("include_titlepage"):
@ -655,7 +655,7 @@ div { margin: 0pt; padding: 0pt; }
outputepub.writestr("OEBPS/cover.xhtml",coverIO.getvalue())
coverIO.close()
titlepageIO = StringIO.StringIO()
titlepageIO = StringIO()
self.writeTitlePage(out=titlepageIO,
START=TITLE_PAGE_START,
ENTRY=TITLE_ENTRY,
@ -667,7 +667,7 @@ div { margin: 0pt; padding: 0pt; }
titlepageIO.close()
# write toc page.
tocpageIO = StringIO.StringIO()
tocpageIO = StringIO()
self.writeTOCPage(tocpageIO,
self.EPUB_TOC_PAGE_START,
self.EPUB_TOC_ENTRY,
@ -678,7 +678,7 @@ div { margin: 0pt; padding: 0pt; }
if dologpage:
# write log page.
logpageIO = StringIO.StringIO()
logpageIO = StringIO()
self.writeLogPage(logpageIO)
outputepub.writestr("OEBPS/log_page.xhtml",logpageIO.getvalue())
logpageIO.close()