mirror of
https://github.com/JimmXinu/FanFicFare.git
synced 2025-12-06 08:52:55 +01:00
Remove web service code. Refer back to tag v3.13.0 if ever needed again.
This commit is contained in:
parent
a0e2db3925
commit
45bc88d9bf
23 changed files with 20 additions and 1455 deletions
|
|
@ -16,19 +16,15 @@
|
|||
#
|
||||
from __future__ import absolute_import
|
||||
|
||||
try:
|
||||
# just a way to switch between web service and CLI/PI
|
||||
import google.appengine.api
|
||||
try: # just a way to switch between CLI and PI
|
||||
import calibre.constants
|
||||
except:
|
||||
try: # just a way to switch between CLI and PI
|
||||
import calibre.constants
|
||||
except:
|
||||
import sys
|
||||
if sys.version_info >= (2, 7):
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
loghandler=logging.StreamHandler()
|
||||
loghandler.setFormatter(logging.Formatter("FFF: %(levelname)s: %(asctime)s: %(filename)s(%(lineno)d): %(message)s"))
|
||||
logger.addHandler(loghandler)
|
||||
loghandler.setLevel(logging.DEBUG)
|
||||
logger.setLevel(logging.DEBUG)
|
||||
import sys
|
||||
if sys.version_info >= (2, 7):
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
loghandler=logging.StreamHandler()
|
||||
loghandler.setFormatter(logging.Formatter("FFF: %(levelname)s: %(asctime)s: %(filename)s(%(lineno)d): %(message)s"))
|
||||
logger.addHandler(loghandler)
|
||||
loghandler.setLevel(logging.DEBUG)
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
|
|
|||
|
|
@ -39,21 +39,7 @@ import pickle
|
|||
|
||||
from . import exceptions
|
||||
|
||||
try:
|
||||
from google.appengine.api import apiproxy_stub_map
|
||||
def urlfetch_timeout_hook(service, call, request, response):
|
||||
if call != 'Fetch':
|
||||
return
|
||||
# Make the default deadline 10 seconds instead of 5.
|
||||
if not request.has_deadline():
|
||||
request.set_deadline(10.0)
|
||||
|
||||
apiproxy_stub_map.apiproxy.GetPreCallHooks().Append(
|
||||
'urlfetch_timeout_hook', urlfetch_timeout_hook, 'urlfetch')
|
||||
logger.info("Hook to make default deadline 10.0 installed.")
|
||||
except:
|
||||
pass
|
||||
#logger.info("Hook to make default deadline 10.0 NOT installed--not using appengine")
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
import chardet
|
||||
|
|
@ -78,8 +64,6 @@ from .htmlcleanup import reduce_zalgo
|
|||
# [overrides]
|
||||
# titlepage_entries: category
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Work around for fact that py3 apparently doesn't allow/ignore
|
||||
# recursive imports like py2 does.
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -195,15 +195,6 @@ def fit_image(width, height, pwidth, pheight):
|
|||
|
||||
return scaled, int(width), int(height)
|
||||
|
||||
try:
|
||||
# doesn't really matter what, just checking for appengine.
|
||||
from google.appengine.api import apiproxy_stub_map
|
||||
|
||||
is_appengine = True
|
||||
except:
|
||||
is_appengine = False
|
||||
|
||||
|
||||
try:
|
||||
from calibre.library.comments import sanitize_comments_html
|
||||
except:
|
||||
|
|
@ -1170,11 +1161,6 @@ class Story(Configurable):
|
|||
url = url.strip() # ran across an image with a space in the
|
||||
# src. Browser handled it, so we'd better, too.
|
||||
|
||||
# appengine (web version) isn't allowed to do images--just
|
||||
# gets too big too fast and breaks things.
|
||||
if is_appengine:
|
||||
return (None,None)
|
||||
|
||||
## Mistakenly ended up with some // in image urls, like:
|
||||
## https://forums.spacebattles.com//styles/default/xenforo/clear.png
|
||||
## Removing one /, but not ://
|
||||
|
|
|
|||
|
|
@ -28,7 +28,7 @@ version_files = [
|
|||
# 'version_test.txt',
|
||||
'setup.py',
|
||||
'calibre-plugin/__init__.py',
|
||||
'webservice/app.yaml',
|
||||
# 'webservice/app.yaml',
|
||||
'fanficfare/cli.py',
|
||||
]
|
||||
|
||||
|
|
@ -55,12 +55,13 @@ version="2.3.6"
|
|||
|
||||
do_loop(version_files, version_re, version_subs)
|
||||
|
||||
index_files = ['webservice/index.html']
|
||||
if saved_version:
|
||||
## only do major/minor, always leave micro 0 in index.html.
|
||||
index_re = 'https://([0-9-]+[a-z]?)\\.fanficfare\\.appspot\\.com'
|
||||
index_subs = 'https://%s-%s-0.fanficfare.appspot.com'%saved_version[0:2]
|
||||
do_loop(index_files, index_re, index_subs)
|
||||
index_files = []
|
||||
# index_files = ['webservice/index.html']
|
||||
# if saved_version:
|
||||
# ## only do major/minor, always leave micro 0 in index.html.
|
||||
# index_re = 'https://([0-9-]+[a-z]?)\\.fanficfare\\.appspot\\.com'
|
||||
# index_subs = 'https://%s-%s-0.fanficfare.appspot.com'%saved_version[0:2]
|
||||
# do_loop(index_files, index_re, index_subs)
|
||||
|
||||
release = 'Release'
|
||||
if int(args[-1]) > 0:
|
||||
|
|
|
|||
|
|
@ -1,14 +0,0 @@
|
|||
Before uploading the webservice to Google AppEngine, the files here,
|
||||
along with ../fanficfare and the contents of
|
||||
../included_dependencies should be copied to a 'build' (or other)
|
||||
directory.
|
||||
|
||||
rm -rf build
|
||||
mkdir build
|
||||
|
||||
cp -R * build
|
||||
cp -R ../fanficfare ../included_dependencies/* build
|
||||
|
||||
cd build
|
||||
|
||||
.../appcfg.py update .
|
||||
|
|
@ -1,37 +0,0 @@
|
|||
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN">
|
||||
<html>
|
||||
<head>
|
||||
<link href="/css/index.css" rel="stylesheet" type="text/css">
|
||||
<title>FanFicFare (fanfiction.net, fanficauthors, fictionalley, ficwad to epub and HTML)</title>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
|
||||
</head>
|
||||
<body>
|
||||
<div id='main'>
|
||||
<h1>
|
||||
<a href="/" style="text-decoration: none; color: black;">FanFicFare</a>
|
||||
</h1>
|
||||
|
||||
<div class="borderbox">
|
||||
{% for fic in fics %}
|
||||
<p>
|
||||
<a href="{{ fic.url }}" title="Link to original story"><span class="recent"><i>{{ fic.title }}</i></span></a>
|
||||
by <a href="{{ fic.authorUrl }}">{{ fic.author }}</a> <b>Download Count:</b> {{ fic.count }} <br />
|
||||
<b>Word Count:</b> {{ fic.numWords }} <b>Chapter Count:</b> {{ fic.numChapters }}<br />
|
||||
{% if fic.category %} <b>Categories:</b> {{ fic.category }} <br /> {% endif %}
|
||||
{% if fic.genre %} <b>Genres:</b> {{ fic.genre }} <br /> {% endif %}
|
||||
{% if fic.language %} <b>Language:</b> {{ fic.language }} <br /> {% endif %}
|
||||
{% if fic.series %} <b>Series:</b> {{ fic.series }} <br /> {% endif %}
|
||||
{% if fic.characters %} <b>Characters:</b> {{ fic.characters }} <br /> {% endif %}
|
||||
{% if fic.status %} <b>Status:</b> {{ fic.status }} <br /> {% endif %}
|
||||
{% if fic.datePublished %} <b>Published:</b> {{ fic.datePublished }} <br /> {% endif %}
|
||||
{% if fic.dateUpdated %} <b>Last Updated:</b> {{ fic.dateUpdated }} <br /> {% endif %}
|
||||
{% if fic.dateCreated %} <b>Last Downloaded:</b> {{ fic.dateCreated }} <br /> {% endif %}
|
||||
{% if fic.rating %} <b>Rating:</b> {{ fic.rating }} <br /> {% endif %}
|
||||
{% if fic.warnings %} <b>Warnings:</b> {{ fic.warnings }} <br /> {% endif %}
|
||||
{% if fic.description %} <b>Summary:</b> {{ fic.description }} <br /> {% endif %}
|
||||
</p>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
|
@ -1,38 +0,0 @@
|
|||
# ffd-retief-hrd fanficfare
|
||||
application: fanficfare
|
||||
version: 3-14-1
|
||||
runtime: python27
|
||||
api_version: 1
|
||||
threadsafe: true
|
||||
basic_scaling:
|
||||
max_instances: 1
|
||||
|
||||
handlers:
|
||||
|
||||
- url: /r3m0v3r.*
|
||||
script: utils.remover.app
|
||||
login: admin
|
||||
|
||||
- url: /tally.*
|
||||
script: utils.tally.app
|
||||
login: admin
|
||||
|
||||
- url: /fdownloadtask
|
||||
script: main.app
|
||||
login: admin
|
||||
|
||||
- url: /css
|
||||
static_dir: css
|
||||
|
||||
# - url: /js
|
||||
# static_dir: js
|
||||
|
||||
- url: /static
|
||||
static_dir: static
|
||||
|
||||
- url: /favicon\.ico
|
||||
static_files: static/favicon.ico
|
||||
upload: static/favicon\.ico
|
||||
|
||||
- url: /.*
|
||||
script: main.app
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
cron:
|
||||
- description: cleanup job
|
||||
url: /r3m0v3r
|
||||
schedule: every 2 hours
|
||||
|
||||
# There's a bug in the Python 2.7 runtime that prevents this from
|
||||
# working properly. In theory, there should never be orphans anyway.
|
||||
#- description: orphan cleanup job
|
||||
# url: /r3m0v3rOrphans
|
||||
# schedule: every 4 hours
|
||||
|
|
@ -1,60 +0,0 @@
|
|||
body
|
||||
{
|
||||
font: 0.9em "Helvetica Neue", Arial, Helvetica, Geneva, sans-serif;
|
||||
}
|
||||
|
||||
#main
|
||||
{
|
||||
width: 60%;
|
||||
margin-left: 20%;
|
||||
background-color: #dae6ff;
|
||||
padding: 2em;
|
||||
}
|
||||
|
||||
#greeting
|
||||
{
|
||||
# margin-bottom: 1em;
|
||||
border-color: #efefef;
|
||||
}
|
||||
|
||||
|
||||
|
||||
.borderbox:hover
|
||||
{
|
||||
border: thin solid #fffeff;
|
||||
}
|
||||
|
||||
h1
|
||||
{
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.borderbox
|
||||
{
|
||||
margin: 1em;
|
||||
padding: 1em;
|
||||
border: thin dotted #fffeff;
|
||||
}
|
||||
|
||||
div.field
|
||||
{
|
||||
margin-bottom: 0.5em;
|
||||
}
|
||||
|
||||
#submitbtn
|
||||
{
|
||||
padding: 1em;
|
||||
}
|
||||
|
||||
#typeoptions
|
||||
{
|
||||
margin-top: 0.5em;
|
||||
}
|
||||
|
||||
#error
|
||||
{
|
||||
color: #f00;
|
||||
}
|
||||
.recent {
|
||||
font-size: large;
|
||||
}
|
||||
|
|
@ -1,48 +0,0 @@
|
|||
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN">
|
||||
<html>
|
||||
<head>
|
||||
<link href="/css/index.css" rel="stylesheet" type="text/css">
|
||||
<title>FanFicFare - read fanfiction from twilighted.net, fanfiction.net, fictionpress.com, fictionalley.org, ficwad.com, potionsandsnitches.net, harrypotterfanfiction.com, mediaminer.org on Kindle, Nook, Sony Reader, iPad, iPhone, Android, Aldiko, Stanza</title>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
|
||||
</head>
|
||||
<body>
|
||||
<div id='main' style="width: 80%; margin-left: 10%;">
|
||||
<h1>
|
||||
<a href="/" style="text-decoration: none; color: black;">FanFicFare</a>
|
||||
</h1>
|
||||
|
||||
<form action="/editconfig" method="post">
|
||||
<input type="hidden" name="update" value="true" />
|
||||
<div id='logpasswordtable'>
|
||||
<h3>Edit Config</h3>
|
||||
<div id='logpassword'>
|
||||
Editing configuration for {{ nickname }}.
|
||||
</div>
|
||||
<div class='fieldandlabel'>
|
||||
<textarea name="config" style="width: 100%; height: 200px;" wrap='off'>{{ config }}</textarea>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id='submitbtn'>
|
||||
<input type="submit" value="Save">
|
||||
</div>
|
||||
</form>
|
||||
|
||||
<div>
|
||||
<h3>Default System configuration</h3>
|
||||
<pre>
|
||||
{{ defaultsini }}
|
||||
</pre>
|
||||
</div>
|
||||
|
||||
<div style='text-align: center'>
|
||||
<img src="http://code.google.com/appengine/images/appengine-silver-120x30.gif"
|
||||
alt="Powered by Google App Engine" />
|
||||
<br/><br/>
|
||||
This is a web front-end to <a href="https://github.com/JimmXinu/FanFicFare/">FanFicFare</a><br/>
|
||||
Copyright © FanFicFare team
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
|
@ -1,64 +0,0 @@
|
|||
# Copyright 2011 Fanficdownloader team
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
import pickle, copy
|
||||
from google.appengine.ext import db
|
||||
|
||||
class ObjectProperty(db.Property):
|
||||
data_type = db.Blob
|
||||
|
||||
def get_value_for_datastore(self, model_instance):
|
||||
value = self.__get__(model_instance, model_instance.__class__)
|
||||
pickled_val = pickle.dumps(value,protocol=pickle.HIGHEST_PROTOCOL)
|
||||
if value is not None: return db.Blob(pickled_val)
|
||||
|
||||
def make_value_from_datastore(self, value):
|
||||
if value is not None: return pickle.loads(value)
|
||||
|
||||
def default_value(self):
|
||||
return copy.copy(self.default)
|
||||
|
||||
class DownloadMeta(db.Model):
|
||||
user = db.UserProperty()
|
||||
url = db.StringProperty()
|
||||
name = db.StringProperty()
|
||||
title = db.StringProperty()
|
||||
author = db.StringProperty()
|
||||
format = db.StringProperty()
|
||||
failure = db.TextProperty()
|
||||
completed = db.BooleanProperty(default=False)
|
||||
date = db.DateTimeProperty(auto_now_add=True)
|
||||
version = db.StringProperty()
|
||||
ch_begin = db.StringProperty()
|
||||
ch_end = db.StringProperty()
|
||||
# data_chunks is implicit from DownloadData def.
|
||||
|
||||
class DownloadData(db.Model):
|
||||
download = db.ReferenceProperty(DownloadMeta,
|
||||
collection_name='data_chunks')
|
||||
blob = db.BlobProperty()
|
||||
index = db.IntegerProperty()
|
||||
|
||||
class UserConfig(db.Model):
|
||||
user = db.UserProperty()
|
||||
config = db.BlobProperty()
|
||||
|
||||
class SavedMeta(db.Model):
|
||||
url = db.StringProperty()
|
||||
title = db.StringProperty()
|
||||
author = db.StringProperty()
|
||||
date = db.DateTimeProperty(auto_now_add=True)
|
||||
count = db.IntegerProperty()
|
||||
meta = ObjectProperty()
|
||||
|
|
@ -1,67 +0,0 @@
|
|||
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN">
|
||||
<html>
|
||||
<head>
|
||||
<link href="/css/index.css" rel="stylesheet" type="text/css">
|
||||
<title>FanFicFare - read Fanfiction from fanfiction.net, archiveofourown.org, fimfiction.net, fictionpress.com, fictionalley.org, ficwad.com, potionsandsnitches.net, harrypotterfanfiction.com, mediaminer.org and many others on Kindle, Nook, Sony Reader, iPad, iPhone, Android, Aldiko, Stanza, etc</title>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
|
||||
</head>
|
||||
<body>
|
||||
<div id='main'>
|
||||
<h1>
|
||||
<a href="/" style="text-decoration: none; color: black;">FanFicFare</a>
|
||||
</h1>
|
||||
<div class="borderbox">
|
||||
<h3>Closing FanFicFare Web Service</h3>
|
||||
<p>Starting Nov 30, Google will only allow me to update the web service if I give them a way to bill me for 'building' the app.
|
||||
</p>
|
||||
<p>
|
||||
And once they have a way to bill me, they will charge when the service
|
||||
goes past the free limit unless manually turned off each time or more
|
||||
draconian scaling settings are used:
|
||||
</p>
|
||||
<p>
|
||||
<i>If you are currently relying on the free tier as a cost control
|
||||
mechanism, you will need to make a configuration change to maintain
|
||||
the current behavior. You must set a Cloud Budget Alert and manually
|
||||
shut off your app, or set the max_instances setting to 1 in app.yaml
|
||||
to never go above the free tier.</i>
|
||||
</p>
|
||||
<p>
|
||||
On Friday Nov 15, 2019, I uploaded the new version with the
|
||||
<i>max_instances:1</i> setting recommended. The service has been 'Over
|
||||
Quota' every time I've looked since then.
|
||||
</p>
|
||||
<p>
|
||||
I maintain FanFicFare as a hobby and for my own use. When I inherited
|
||||
the project from the original developer (Roman Kirillov), it already
|
||||
had the web service running on Google App Engine associated with it.
|
||||
</p>
|
||||
<p>
|
||||
I continued to support the web service in recent years as a legacy for
|
||||
the users who can't run the CLI or Calibre versions. But I'm not
|
||||
interested in spending my money on it, or dealing with the accounting
|
||||
and possible tax implications of collecting donations to run it.
|
||||
</p>
|
||||
<p>
|
||||
I plan to continue maintaining the Calibre Plugin and Python CLI
|
||||
versions of FanFicFare. But at this point, I'm shutting down the web
|
||||
service.
|
||||
</p>
|
||||
<p>
|
||||
If you are interested in possibly taking over or making a new web
|
||||
service, please join the conversation at
|
||||
the <a href="https://groups.google.com/forum/#!topic/fanfic-downloader/AjhctdzWsW0">FanFicFare
|
||||
Google Group</a>.
|
||||
</p>
|
||||
</div>
|
||||
<div style='text-align: center'>
|
||||
<img src="https://code.google.com/appengine/images/appengine-silver-120x30.gif"
|
||||
alt="Powered by Google App Engine" />
|
||||
<br/><br/>
|
||||
This is a web front-end to <A href="https://github.com/JimmXinu/FanFicFare">FanFicFare</a><br/>
|
||||
Copyright © FanFicFare team
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
|
@ -1,28 +0,0 @@
|
|||
indexes:
|
||||
|
||||
# notAUTOGENERATED
|
||||
|
||||
# This index.yaml is automatically updated whenever the dev_appserver
|
||||
# detects that a new type of query is run. If you want to manage the
|
||||
# index.yaml file manually, remove the above marker line (the line
|
||||
# saying "# AUTOGENERATED"). If you want to manage some indexes
|
||||
# manually, move them above the marker line. The index.yaml file is
|
||||
# automatically uploaded to the admin console when you next deploy
|
||||
# your application using appcfg.py.
|
||||
|
||||
- kind: DownloadData
|
||||
properties:
|
||||
- name: download
|
||||
- name: index
|
||||
|
||||
- kind: DownloadMeta
|
||||
properties:
|
||||
- name: user
|
||||
- name: date
|
||||
direction: desc
|
||||
|
||||
- kind: SavedMeta
|
||||
properties:
|
||||
- name: count
|
||||
- name: date
|
||||
direction: desc
|
||||
|
|
@ -1,77 +0,0 @@
|
|||
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN">
|
||||
<html>
|
||||
<head>
|
||||
<link href="/css/index.css" rel="stylesheet" type="text/css">
|
||||
<title>Login Needed FanFicFare</title>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
|
||||
</head>
|
||||
<body>
|
||||
<div id='main'>
|
||||
<h1>
|
||||
<a href="/" style="text-decoration: none; color: black;">FanFicFare</a>
|
||||
</h1>
|
||||
|
||||
{% if fic.failure %}
|
||||
<div id='error'>
|
||||
{{ fic.failure }}
|
||||
</div>
|
||||
{% endif %}
|
||||
<form action="/fdown" method="post">
|
||||
<input type="hidden" name="url" value='{{ url }}'>
|
||||
<input type="hidden" name="format" value='{{ format }}'>
|
||||
<div class="borderbox">
|
||||
|
||||
{% if is_login %}
|
||||
|
||||
{% if is_passwdonly %}
|
||||
<h3>Password</h3>
|
||||
<div class="borderbox">
|
||||
{{ site }} requires a Password for this story.<br>
|
||||
You need to provide the Password for this story
|
||||
to download it.
|
||||
</div>
|
||||
{% else %}
|
||||
<h3>Login / Password</h3>
|
||||
<div class="borderbox">
|
||||
{{ site }} requires a Login/Password for this story.<br>
|
||||
You need to provide your Login/Password for {{ site }}
|
||||
to download it.
|
||||
</div>
|
||||
<div class='fieldandlabel'>
|
||||
<div class='label'>Login</div>
|
||||
<div class='field'><input type='text' name='login' size='50'></div>
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class='fieldandlabel'>
|
||||
<div class='label'>Password</div>
|
||||
<div class='field'><input type='password' name='password' size='50'></div>
|
||||
</div>
|
||||
|
||||
{% else %}
|
||||
|
||||
<input type="hidden" name="login" value='{{ login }}'>
|
||||
<input type="hidden" name="password" value='{{ password }}'>
|
||||
<div class='fieldandlabel'>
|
||||
<div class='label'>Are you an Adult? <input type='checkbox' name='is_adult'></div>
|
||||
</div>
|
||||
|
||||
{% endif %}
|
||||
|
||||
</div>
|
||||
|
||||
<div id='submitbtn'>
|
||||
<input type="submit" value="Download">
|
||||
</div>
|
||||
</form>
|
||||
|
||||
<div style='text-align: center'>
|
||||
<img src="http://code.google.com/appengine/images/appengine-silver-120x30.gif"
|
||||
alt="Powered by Google App Engine" />
|
||||
<br/><br/>
|
||||
This is a web front-end to <a href="https://github.com/JimmXinu/FanFicFare/">FanFicFare</a><br/>
|
||||
Copyright © FanFicFare team
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
|
@ -1,654 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright 2007 Google Inc.
|
||||
# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
import logging
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
|
||||
import os
|
||||
from os.path import dirname, basename, normpath
|
||||
import re
|
||||
import sys
|
||||
import zlib
|
||||
import urllib
|
||||
import datetime
|
||||
|
||||
import traceback
|
||||
from io import StringIO
|
||||
|
||||
from google.appengine.ext import db
|
||||
from google.appengine.api import taskqueue
|
||||
from google.appengine.api import users
|
||||
from google.appengine.api import mail
|
||||
import webapp2
|
||||
from google.appengine.ext.webapp import template
|
||||
#from google.appengine.ext.webapp2 import util
|
||||
from google.appengine.runtime import DeadlineExceededError
|
||||
|
||||
from ffstorage import *
|
||||
|
||||
from fanficfare import adapters, writers, exceptions
|
||||
from fanficfare.htmlcleanup import stripHTML
|
||||
from fanficfare.configurable import Configuration
|
||||
|
||||
class UserConfigServer(webapp2.RequestHandler):
|
||||
|
||||
def getUserConfig(self,user,url,fileformat):
|
||||
|
||||
configuration = Configuration(adapters.getConfigSectionsFor(url),fileformat)
|
||||
|
||||
logging.debug('reading defaults.ini config file')
|
||||
configuration.read('fanficfare/defaults.ini')
|
||||
|
||||
## Pull user's config record.
|
||||
l = UserConfig.all().filter('user =', user).fetch(1)
|
||||
if l and l[0].config:
|
||||
uconfig=l[0]
|
||||
#logging.debug('reading config from UserConfig(%s)'%uconfig.config)
|
||||
configuration.readfp(StringIO(uconfig.config.decode('utf-8')))
|
||||
|
||||
return configuration
|
||||
|
||||
class MainHandler(webapp2.RequestHandler):
|
||||
def get(self):
|
||||
user = users.get_current_user()
|
||||
if user:
|
||||
error = self.request.get('error')
|
||||
template_values = {'nickname' : user.nickname(), 'authorized': True}
|
||||
url = self.request.get('url')
|
||||
template_values['url'] = url
|
||||
|
||||
if error:
|
||||
if error == 'login_required':
|
||||
template_values['error_message'] = 'This story (or one of the chapters) requires you to be logged in.'
|
||||
elif error == 'bad_url':
|
||||
template_values['error_message'] = 'Unsupported URL: ' + url
|
||||
elif error == 'custom':
|
||||
template_values['error_message'] = 'Error happened: ' + self.request.get('errtext')
|
||||
elif error == 'configsaved':
|
||||
template_values['error_message'] = 'Configuration Saved'
|
||||
elif error == 'recentcleared':
|
||||
template_values['error_message'] = 'Your Recent Downloads List has been Cleared'
|
||||
|
||||
self.response.headers['Content-Type'] = 'text/html'
|
||||
path = os.path.join(os.path.dirname(__file__), 'index.html')
|
||||
|
||||
else:
|
||||
logging.debug(users.create_login_url('/'))
|
||||
url = users.create_login_url(self.request.uri)
|
||||
template_values = {'login_url' : url, 'authorized': False}
|
||||
path = os.path.join(os.path.dirname(__file__), 'index.html')
|
||||
|
||||
|
||||
template_values['supported_sites'] = '<dl>\n'
|
||||
for (site,examples) in adapters.getSiteExamples():
|
||||
template_values['supported_sites'] += "<dt>%s</dt>\n<dd>Example Story URLs:<br>"%site
|
||||
for u in examples:
|
||||
template_values['supported_sites'] += "<a href='%s'>%s</a><br>\n"%(u,u)
|
||||
template_values['supported_sites'] += "</dd>\n"
|
||||
template_values['supported_sites'] += '</dl>\n'
|
||||
|
||||
self.response.out.write(template.render(path, template_values))
|
||||
|
||||
|
||||
class EditConfigServer(UserConfigServer):
|
||||
def get(self):
|
||||
self.post()
|
||||
|
||||
def post(self):
|
||||
user = users.get_current_user()
|
||||
if not user:
|
||||
self.redirect(users.create_login_url(self.request.uri))
|
||||
return
|
||||
|
||||
template_values = {'nickname' : user.nickname(), 'authorized': True}
|
||||
|
||||
## Pull user's config record.
|
||||
l = UserConfig.all().filter('user =', user).fetch(1)
|
||||
if l:
|
||||
uconfig=l[0]
|
||||
else:
|
||||
uconfig=None
|
||||
|
||||
if self.request.get('update'):
|
||||
if uconfig is None:
|
||||
uconfig = UserConfig()
|
||||
uconfig.user = user
|
||||
uconfig.config = self.request.get('config').encode('utf8')[:10000] ## just in case.
|
||||
uconfig.put()
|
||||
try:
|
||||
# just getting config for testing purposes.
|
||||
configuration = self.getUserConfig(user,"test1.com","epub")
|
||||
self.redirect("/?error=configsaved")
|
||||
except Exception as e:
|
||||
logging.info("Saved Config Failed:%s"%e)
|
||||
self.redirect("/?error=custom&errtext=%s"%urllib.quote(unicode(e),''))
|
||||
else: # not update, assume display for edit
|
||||
if uconfig is not None and uconfig.config:
|
||||
config = uconfig.config
|
||||
else:
|
||||
configfile = open("fanficfare/example.ini","rb")
|
||||
config = configfile.read()
|
||||
configfile.close()
|
||||
template_values['config'] = config
|
||||
|
||||
configfile = open("fanficfare/defaults.ini","rb")
|
||||
config = configfile.read()
|
||||
configfile.close()
|
||||
template_values['defaultsini'] = config
|
||||
|
||||
path = os.path.join(os.path.dirname(__file__), 'editconfig.html')
|
||||
self.response.headers['Content-Type'] = 'text/html'
|
||||
self.response.out.write(template.render(path, template_values))
|
||||
|
||||
|
||||
class FileServer(webapp2.RequestHandler):
|
||||
|
||||
def get(self):
|
||||
fileId = self.request.get('id')
|
||||
|
||||
if fileId == None or len(fileId) < 3:
|
||||
self.redirect('/')
|
||||
return
|
||||
|
||||
try:
|
||||
download = getDownloadMeta(id=fileId)
|
||||
|
||||
name = download.name.encode('utf-8')
|
||||
|
||||
logging.info("Serving file: %s" % name)
|
||||
|
||||
if name.endswith('.epub'):
|
||||
self.response.headers['Content-Type'] = 'application/epub+zip'
|
||||
elif name.endswith('.html'):
|
||||
self.response.headers['Content-Type'] = 'text/html'
|
||||
elif name.endswith('.txt'):
|
||||
self.response.headers['Content-Type'] = 'text/plain'
|
||||
elif name.endswith('.mobi'):
|
||||
self.response.headers['Content-Type'] = 'application/x-mobipocket-ebook'
|
||||
elif name.endswith('.zip'):
|
||||
self.response.headers['Content-Type'] = 'application/zip'
|
||||
else:
|
||||
self.response.headers['Content-Type'] = 'application/octet-stream'
|
||||
|
||||
self.response.headers['Content-disposition'] = 'attachment; filename="%s"' % name
|
||||
|
||||
data = DownloadData.all().filter("download =", download).order("index")
|
||||
# epubs are all already compressed.
|
||||
# Each chunk is compress individually to avoid having
|
||||
# to hold the whole in memory just for the
|
||||
# compress/uncompress
|
||||
if download.format != 'epub':
|
||||
def decompress(data):
|
||||
try:
|
||||
return zlib.decompress(data)
|
||||
# if error, assume it's a chunk from before we started compessing.
|
||||
except zlib.error:
|
||||
return data
|
||||
else:
|
||||
def decompress(data):
|
||||
return data
|
||||
|
||||
for datum in data:
|
||||
self.response.out.write(decompress(datum.blob))
|
||||
|
||||
except Exception as e:
|
||||
fic = DownloadMeta()
|
||||
fic.failure = unicode(e)
|
||||
|
||||
template_values = dict(fic = fic,
|
||||
#nickname = user.nickname(),
|
||||
#escaped_url = escaped_url
|
||||
)
|
||||
path = os.path.join(os.path.dirname(__file__), 'status.html')
|
||||
self.response.out.write(template.render(path, template_values))
|
||||
|
||||
class FileStatusServer(webapp2.RequestHandler):
|
||||
def get(self):
|
||||
user = users.get_current_user()
|
||||
if not user:
|
||||
self.redirect(users.create_login_url(self.request.uri))
|
||||
return
|
||||
|
||||
fileId = self.request.get('id')
|
||||
|
||||
if fileId == None or len(fileId) < 3:
|
||||
self.redirect('/')
|
||||
|
||||
escaped_url=False
|
||||
|
||||
try:
|
||||
download = getDownloadMeta(id=fileId)
|
||||
|
||||
if download:
|
||||
logging.info("Status url: %s" % download.url)
|
||||
if download.completed and download.format=='epub':
|
||||
escaped_url = urllib.quote(self.request.host_url+"/file/"+download.name+"."+download.format+"?id="+fileId+"&fake=file."+download.format,'')
|
||||
else:
|
||||
download = DownloadMeta()
|
||||
download.failure = "Download not found"
|
||||
|
||||
except Exception as e:
|
||||
download = DownloadMeta()
|
||||
download.failure = unicode(e)
|
||||
|
||||
template_values = dict(fic = download,
|
||||
nickname = user.nickname(),
|
||||
escaped_url = escaped_url
|
||||
)
|
||||
path = os.path.join(os.path.dirname(__file__), 'status.html')
|
||||
self.response.out.write(template.render(path, template_values))
|
||||
|
||||
class ClearRecentServer(webapp2.RequestHandler):
|
||||
def get(self):
|
||||
user = users.get_current_user()
|
||||
if not user:
|
||||
self.redirect(users.create_login_url(self.request.uri))
|
||||
return
|
||||
|
||||
logging.info("Clearing Recent List for user: "+user.nickname())
|
||||
q = DownloadMeta.all()
|
||||
q.filter('user =', user)
|
||||
num=0
|
||||
while( True ):
|
||||
results = q.fetch(100)
|
||||
if results:
|
||||
for d in results:
|
||||
d.delete()
|
||||
for chunk in d.data_chunks:
|
||||
chunk.delete()
|
||||
num = num + 1
|
||||
logging.debug('Delete '+d.url)
|
||||
else:
|
||||
break
|
||||
logging.info('Deleted %d instances download.' % num)
|
||||
self.redirect("/?error=recentcleared")
|
||||
|
||||
class RecentFilesServer(webapp2.RequestHandler):
|
||||
def get(self):
|
||||
user = users.get_current_user()
|
||||
if not user:
|
||||
self.redirect(users.create_login_url(self.request.uri))
|
||||
return
|
||||
|
||||
q = DownloadMeta.all()
|
||||
q.filter('user =', user).order('-date')
|
||||
fics = q.fetch(100)
|
||||
logging.info("Recent fetched %d downloads for user %s."%(len(fics),user.nickname()))
|
||||
|
||||
for fic in fics:
|
||||
if fic.completed and fic.format == 'epub':
|
||||
fic.escaped_url = urllib.quote(self.request.host_url+"/file/"+fic.name+"."+fic.format+"?id="+unicode(fic.key())+"&fake=file."+fic.format,'')
|
||||
|
||||
template_values = dict(fics = fics, nickname = user.nickname())
|
||||
path = os.path.join(os.path.dirname(__file__), 'recent.html')
|
||||
self.response.out.write(template.render(path, template_values))
|
||||
|
||||
class AllRecentFilesServer(webapp2.RequestHandler):
|
||||
def get(self):
|
||||
user = users.get_current_user()
|
||||
if not user:
|
||||
self.redirect(users.create_login_url(self.request.uri))
|
||||
return
|
||||
|
||||
q = SavedMeta.all()
|
||||
if self.request.get('bydate'):
|
||||
q.order('-date')
|
||||
else:
|
||||
q.order('-count')
|
||||
|
||||
fics = q.fetch(200)
|
||||
logging.info("Recent fetched %d downloads for user %s."%(len(fics),user.nickname()))
|
||||
|
||||
sendslugs = []
|
||||
|
||||
for fic in fics:
|
||||
ficslug = FicSlug(fic)
|
||||
sendslugs.append(ficslug)
|
||||
|
||||
template_values = dict(fics = sendslugs, nickname = user.nickname())
|
||||
path = os.path.join(os.path.dirname(__file__), 'allrecent.html')
|
||||
self.response.out.write(template.render(path, template_values))
|
||||
|
||||
class FicSlug():
    """Flat, template-friendly view of a SavedMeta record.

    Copies url/count and promotes every metadata entry to an attribute
    so Django templates can reference them directly.
    """

    def __init__(self, savedmeta):
        self.url = savedmeta.url
        self.count = savedmeta.count
        for key, value in savedmeta.meta.iteritems():
            # Descriptions may contain markup; strip to plain text for display.
            if key == 'description':
                value = stripHTML(value)
            setattr(self, key, value)
|
||||
|
||||
class FanfictionDownloader(UserConfigServer):
    """Handles '/fdown': validates a story URL and queues the download.

    Scrapes story metadata synchronously so bad URLs, bad logins and
    missing stories fail fast in the browser, then enqueues the heavy
    download work on the 'download' task queue (FanfictionDownloaderTask).
    """

    def get(self):
        # GET and POST behave identically.
        self.post()

    def post(self):
        logging.getLogger().setLevel(logging.DEBUG)
        user = users.get_current_user()
        if not user:
            self.redirect(users.create_login_url(self.request.uri))
            return

        format = self.request.get('format')
        url = self.request.get('url')

        if not url or url.strip() == "":
            self.redirect('/')
            return

        # Allow chapter range with URL.
        # like test1.com?sid=5[4-6] or [4,6]
        url,ch_begin,ch_end = adapters.get_url_chapter_range(url)

        logging.info("Queuing Download: %s" % url)
        login = self.request.get('login')
        password = self.request.get('password')
        is_adult = self.request.get('is_adult') == "on"
        email = self.request.get('email')

        # use existing record if available.  Fetched/Created before
        # the adapter can normalize the URL in case we need to record
        # an exception.
        download = getDownloadMeta(url=url,user=user,format=format,new=True)

        adapter = None
        try:
            try:
                configuration = self.getUserConfig(user,url,format)
            except exceptions.UnknownSite:
                self.redirect("/?error=custom&errtext=%s"%urllib.quote("Unsupported site in URL (%s). See 'Support sites' list below."%url,''))
                return
            except Exception as e:
                # Truncated due to Location header length limit.
                self.redirect("/?error=custom&errtext=%s"%urllib.quote("There's an error in your User Configuration: "+unicode(e),'')[:2048])
                return

            adapter = adapters.getAdapter(configuration,url)
            adapter.setChaptersRange(ch_begin,ch_end)
            # (typo fix: was "adaper"; now matches the task handler's message)
            logging.info('Created an adapter: %s' % adapter)

            if login or password:
                adapter.username=login
                adapter.password=password
            adapter.is_adult=is_adult

            ## This scrapes the metadata, which will be
            ## duplicated in the queue task, but it
            ## detects bad URLs, bad login, bad story, etc
            ## without waiting for the queue.  So I think
            ## it's worth the double up.  Could maybe save
            ## it all in the download object someday.
            story = adapter.getStoryMetadataOnly()

            ## Fetch again using normalized story URL.  The one
            ## fetched/created above, if different, will not be saved.
            download = getDownloadMeta(url=story.getMetadata('storyUrl'),
                                       user=user,format=format,new=True)

            download.title = story.getMetadata('title')
            download.author = story.getMetadata('author')
            download.url = story.getMetadata('storyUrl')
            download.ch_begin = ch_begin
            download.ch_end = ch_end
            download.put()

            taskqueue.add(url='/fdowntask',
                          queue_name="download",
                          params={'id':unicode(download.key()),
                                  'format':format,
                                  'url':download.url,
                                  'login':login,
                                  'password':password,
                                  'user':user.email(),
                                  'email':email,
                                  'is_adult':is_adult})

            logging.info("enqueued download key: " + unicode(download.key()))

        # 'as e' form for consistency with the other handlers in this
        # file; the old 'except X, e' form is deprecated (PEP 3110).
        except (exceptions.FailedToLogin,exceptions.AdultCheckRequired) as e:
            download.failure = unicode(e)
            download.put()
            logging.info(unicode(e))
            is_login= ( isinstance(e, exceptions.FailedToLogin) )
            is_passwdonly = is_login and e.passwdonly
            template_values = dict(nickname = user.nickname(),
                                   url = url,
                                   format = format,
                                   site = adapter.getConfigSection(),
                                   fic = download,
                                   is_login=is_login,
                                   is_passwdonly=is_passwdonly
                                   )
            # thewriterscoffeeshop.com can do adult check *and* user required.
            if isinstance(e,exceptions.AdultCheckRequired):
                template_values['login']=login
                template_values['password']=password

            path = os.path.join(os.path.dirname(__file__), 'login.html')
            self.response.out.write(template.render(path, template_values))
            return
        except (exceptions.InvalidStoryURL,exceptions.UnknownSite,exceptions.StoryDoesNotExist) as e:
            logging.warn(unicode(e))
            download.failure = unicode(e)
            download.put()
        except Exception as e:
            logging.error("Failure Queuing Download: url:%s" % url)
            logging.exception(e)
            download.failure = unicode(e)
            download.put()

        self.redirect('/status?id='+unicode(download.key()))

        return
|
||||
|
||||
|
||||
class FanfictionDownloaderTask(UserConfigServer):
    """Task-queue worker behind '/fdowntask'.

    Does the heavy lifting queued by FanfictionDownloader: fetches the
    whole story, renders it with the requested writer, stores the output
    in ~1MB DownloadData chunks under the DownloadMeta record, and bumps
    the site-wide SavedMeta popularity counter.
    """

    def post(self):
        logging.getLogger().setLevel(logging.DEBUG)
        fileId = self.request.get('id')
        # User object can't pass, just email address
        user = users.User(self.request.get('user'))
        format = self.request.get('format')
        url = self.request.get('url')
        login = self.request.get('login')
        password = self.request.get('password')
        # BUG FIX: taskqueue params arrive stringified, so the old
        # self.request.get('is_adult') returned the *string* "False",
        # which is truthy.  Compare explicitly instead.
        is_adult = self.request.get('is_adult') == 'True'
        email = self.request.get('email')  # kept for parity with queue params; unused here

        logging.info("Downloading: " + url + " for user: "+user.nickname())
        logging.info("ID: " + fileId)

        adapter = None

        # use existing record if available.
        # fileId should have record from /fdown.
        download = getDownloadMeta(id=fileId,url=url,user=user,format=format,new=True)
        # Clear any stale payload chunks from a previous attempt.
        for chunk in download.data_chunks:
            chunk.delete()
        download.put()

        logging.info('Creating adapter...')

        try:
            configuration = self.getUserConfig(user,url,format)
            adapter = adapters.getAdapter(configuration,url)
            adapter.setChaptersRange(download.ch_begin,download.ch_end)

            logging.info('Created an adapter: %s' % adapter)

            if login or password:
                adapter.username=login
                adapter.password=password
            adapter.is_adult=is_adult

            # adapter.getStory() is what does all the heavy lifting.
            # adapter.getStoryMetadataOnly() only fetches enough to
            # get metadata.  writer.writeStory() will call
            # adapter.getStory(), too.
            writer = writers.getWriter(format,configuration,adapter)
            download.name = writer.getOutputFileName()
            logging.debug('getOutputFileName:'+writer.getOutputFileName())
            download.title = adapter.getStory().getMetadata('title')
            download.author = adapter.getStory().getMetadata('author')
            download.url = adapter.getStory().getMetadata('storyUrl')
            download.put()

            allmeta = adapter.getStory().getAllMetadata(removeallentities=True,doreplacements=False)

            outbuffer = BytesIO()
            writer.writeStory(outbuffer)
            data = outbuffer.getvalue()
            outbuffer.close()
            # Drop the big objects as soon as possible to keep the
            # task's peak memory down.
            del outbuffer
            del writer
            del adapter

            # epubs are all already compressed.  Each chunk is
            # compressed individually to avoid having to hold the
            # whole in memory just for the compress/uncompress.
            if format != 'epub':
                def compress(data):
                    return zlib.compress(data)
            else:
                def compress(data):
                    return data

            # delete existing chunks first
            for chunk in download.data_chunks:
                chunk.delete()

            index=0
            while( len(data) > 0 ):
                DownloadData(download=download,
                             index=index,
                             blob=compress(data[:1000000])).put()
                index += 1
                data = data[1000000:]
            download.completed=True
            download.put()

            # Update (or create) the shared popularity record for this story.
            smetal = SavedMeta.all().filter('url =', allmeta['storyUrl'] ).fetch(1)
            if smetal and smetal[0]:
                smeta = smetal[0]
                smeta.count += 1
            else:
                smeta=SavedMeta()
                smeta.count = 1

            smeta.url = allmeta['storyUrl']
            smeta.title = allmeta['title']
            smeta.author = allmeta['author']
            smeta.meta = allmeta
            smeta.date = datetime.datetime.now()
            smeta.put()

            logging.info("Download finished OK")
            del data

        except Exception as e:
            # Record the failure on the record so the status page can
            # show it; swallowing the exception also prevents the task
            # queue from retrying a download that will just fail again.
            logging.exception(e)
            download.failure = unicode(e)
            download.put()
            return

        return
|
||||
|
||||
def getDownloadMeta(id=None,url=None,user=None,format=None,new=False):
    """Look up (or create) the DownloadMeta record for a download.

    Tries the datastore key `id` first, then falls back to the
    (user, url, format) triple.  With new=True, a missing record is
    created and an existing one is reset to the not-yet-completed
    state (completed/failure/date/version).  Returns the record, or
    None when nothing matched and new is False.
    """
    download = None
    if id:
        try:
            download = db.get(db.Key(id))
            logging.info("DownloadMeta found by ID:"+id)
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt and
            # SystemExit are no longer swallowed.  A bad or stale key
            # simply falls through to the user/url/format lookup.
            pass

    if not download and url and user and format:
        try:
            q = DownloadMeta.all().filter('user =', user).filter('url =',url).filter('format =',format).fetch(1)
            if( q is not None and len(q) > 0 ):
                logging.debug("DownloadMeta found by user:%s url:%s format:%s"%(user,url,format))
                download = q[0]
        except Exception:
            # Same narrowing as above; a lookup failure means "not found".
            pass

    if new:
        # NOT clearing existing chunks here, because this record may
        # never be saved.
        if not download:
            logging.debug("New DownloadMeta")
            download = DownloadMeta()

        download.completed=False
        download.failure=None
        download.date=datetime.datetime.now()

        download.version = "%s:%s" % (os.environ['APPLICATION_ID'],os.environ['CURRENT_VERSION_ID'])
    if user:
        download.user = user
    if url:
        download.url = url
    if format:
        download.format = format

    return download
|
||||
|
||||
def send_mail_attachment(sender,to,subject,body,attach_fn,attach_data):
    """Send an email with one attached file via the GAE mail API.

    attach_fn is the attachment's filename, attach_data its bytes.
    """
    message = mail.EmailMessage(sender=sender,
                                to=[to],
                                subject=subject,
                                body=body)
    message.attachments = [mail.Attachment(attach_fn,attach_data)]
    # Raise early on malformed fields rather than at send time.
    message.check_initialized()
    message.send()
|
||||
|
||||
logging.getLogger().setLevel(logging.DEBUG)

# URL routing for the downloader front-end.
app = webapp2.WSGIApplication(
    [('/', MainHandler),
     ('/fdowntask', FanfictionDownloaderTask),
     ('/fdown', FanfictionDownloader),
     (r'/file.*', FileServer),
     ('/status', FileStatusServer),
     ('/allrecent', AllRecentFilesServer),
     ('/recent', RecentFilesServer),
     ('/editconfig', EditConfigServer),
     ('/clearrecent', ClearRecentServer)],
    debug=False)
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
queue:
|
||||
- name: default
|
||||
rate: 1/s
|
||||
- name: download
|
||||
rate: 10/s
|
||||
retry_parameters:
|
||||
task_retry_limit: 2
|
||||
|
|
@ -1,45 +0,0 @@
|
|||
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN">
|
||||
<html>
|
||||
<head>
|
||||
<link href="/css/index.css" rel="stylesheet" type="text/css">
|
||||
<title>FanFicFare (fanfiction.net, fanficauthors, fictionalley, ficwad to epub and HTML)</title>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
|
||||
</head>
|
||||
<body>
|
||||
<div id='main'>
|
||||
<h1>
|
||||
<a href="/" style="text-decoration: none; color: black;">FanFicFare</a>
|
||||
</h1>
|
||||
|
||||
<div id='urlbox'>
|
||||
<div id='greeting'>
|
||||
<p>Hi, {{ nickname }}! These are the fanfics you've recently requested.</p>
|
||||
<p><a href="/clearrecent">Clear your Recent Downloads List</a></p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="borderbox">
|
||||
{% for fic in fics %}
|
||||
<p>
|
||||
{% if fic.completed %}
|
||||
<span class="recent"><a href="/file?id={{ fic.key }}">Download <i>{{ fic.title }}</i></a></span>
|
||||
by {{ fic.author }} ({{ fic.format }})
|
||||
{% endif %}
|
||||
{% if not fic.completed and not fic.failure %}
|
||||
<span class="recent">Processing <i>{{ fic.title }}</i></span>
|
||||
by {{ fic.author }} ({{ fic.format }})
|
||||
{% endif %}
|
||||
{% if fic.failure %}
|
||||
<span id='error'>{{ fic.failure }}</span>
|
||||
{% endif %}
|
||||
<a href="{{ fic.url }}" title="Link to original story">Source</a>
|
||||
{% if fic.completed and fic.escaped_url %}
|
||||
<a href="http://www.convertfiles.com/index.php?url={{ fic.escaped_url }}" title="Convert to other formats using Convertfiles.com">Convert</a>
|
||||
{% endif %}
|
||||
</p>
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
|
@ -1,25 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2011 Fanficdownloader team
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
## Just to shut up the appengine warning about "You are using the
|
||||
## default Django version (0.96). The default Django version will
|
||||
## change in an App Engine release in the near future. Please call
|
||||
## use_library() to explicitly select a Django version. For more
|
||||
## information see
|
||||
## http://code.google.com/appengine/docs/python/tools/libraries.html#Django"
|
||||
|
||||
pass
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 31 KiB |
|
|
@ -1,54 +0,0 @@
|
|||
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN">
|
||||
<html>
|
||||
<head>
|
||||
<link href="/css/index.css" rel="stylesheet" type="text/css">
|
||||
<title>FFF - {% if fic.completed %} Finished {% else %} {% if fic.failure %} Failed {% else %} Working... {% endif %} {% endif %}</title>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
|
||||
{% if not fic.completed and not fic.failure %}
|
||||
<meta http-equiv="refresh" content="10">
|
||||
{% endif %}
|
||||
</head>
|
||||
<body>
|
||||
<div id='main'>
|
||||
<h1>
|
||||
<a href="/" style="text-decoration: none; color: black;">FanFicFare</a>
|
||||
</h1>
|
||||
|
||||
<div id='urlbox'>
|
||||
{% if fic.url %}
|
||||
<div id='greeting'>
|
||||
<p>
|
||||
{% if fic.completed %}
|
||||
<p>Your fic has finished processing and you can download it now.</p>
|
||||
<span class="recent"><a href="/file?id={{ fic.key }}">Download <i>{{ fic.title }}</i></a></span>
|
||||
by {{ fic.author }} ({{ fic.format }})
|
||||
{% endif %}
|
||||
{% if fic.failure %}
|
||||
<span id='error'>{{ fic.failure }}</span>
|
||||
{% endif %}
|
||||
{% if not fic.completed and not fic.failure %}
|
||||
<p>Not done yet. This page will periodically poll to see if your story has finished.</p>
|
||||
<span class="recent">Processing <i>{{ fic.title }}</i></span>
|
||||
by {{ fic.author }} ({{ fic.format }})
|
||||
{% endif %}
|
||||
<a href="{{ fic.url }}" title="Link to original story">Source</a>
|
||||
{% if fic.completed and escaped_url %}
|
||||
<a href="http://www.convertfiles.com/index.php?url={{ escaped_url }}" title="Convert to other formats using Convertfiles.com">Convert</a>
|
||||
{% endif %}
|
||||
</p>
|
||||
</div>
|
||||
{% endif %}
|
||||
<p>See your personal list of <a href="/recent">previously downloaded fanfics</a>.</p>
|
||||
</div>
|
||||
|
||||
<div style='text-align: center'>
|
||||
<img src="http://code.google.com/appengine/images/appengine-silver-120x30.gif"
|
||||
alt="Powered by Google App Engine" />
|
||||
<br/><br/>
|
||||
This is a web front-end to <a href="https://github.com/JimmXinu/FanFicFare/">FanFicFare</a><br/>
|
||||
Copyright © FanFicFare team
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
|
@ -1 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
|
@ -1,109 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Copyright 2011 Fanficdownloader team
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
"""
|
||||
remover.py
|
||||
|
||||
Created by Roman on 2010-06-20.
|
||||
Copyright 2011 Fanficdownloader team
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
|
||||
#from google.appengine.ext.webapp import util
|
||||
import webapp2
|
||||
#from google.appengine.ext import webapp
|
||||
from google.appengine.api import users
|
||||
from google.appengine.api import taskqueue
|
||||
from google.appengine.api import memcache
|
||||
|
||||
from ffstorage import *
|
||||
|
||||
class Remover(webapp2.RequestHandler):
    """Handles '/r3m0v3r': purges downloads older than three days."""

    def get(self):
        logging.debug("Starting r3m0v3r")
        user = users.get_current_user()
        logging.debug("Working as user %s" % user)
        cutoff = datetime.datetime.now() - datetime.timedelta(days=3)
        logging.debug("Will delete stuff older than %s" % cutoff)

        # Oldest first, at most 100 records per invocation.
        fics = DownloadMeta.all()
        fics.filter("date <",cutoff).order("date")

        results = fics.fetch(100)
        logging.debug([x.name for x in results])

        deleted_count = 0
        for record in results:
            record.delete()
            # Remove the story payload chunks along with the record.
            for chunk in record.data_chunks:
                chunk.delete()
            deleted_count += 1
            logging.debug('Delete '+record.url)

        logging.info('Deleted instances: %d' % deleted_count)
        self.response.headers['Content-Type'] = 'text/html'
        self.response.out.write('Deleted instances: %d<br>' % deleted_count)
|
||||
|
||||
class RemoveOrphanDataChunks(webapp2.RequestHandler):
    """Handles '/r3m0v3rOrphans': deletes DownloadData chunks whose
    parent DownloadMeta record has already been removed."""

    def get(self):
        logging.debug("Starting RemoveOrphanDataChunks")
        user = users.get_current_user()
        logging.debug("Working as user %s" % user)

        ## Can't search for all chunks in web req because it's too
        ## long.  Can't do it in a queue task, because it's still too
        ## long.  Can't try ordering by id or download because the ids
        ## are not increasing.  Instead, use a saved cursor to walk
        ## all the way through over time, then starting at the top
        ## again when finished.

        chunks = DownloadData.all()

        saved_cursor = memcache.get('orphan_search_cursor')
        if saved_cursor:
            chunks.with_cursor(saved_cursor)

        deleted = 0
        seen = 0
        step = 100
        for chunk in chunks.fetch(step):
            ## Dereferencing the parent is the only way found to test
            ## for orphans: it raises when the referent is gone.
            try:
                chunk.download
            except db.ReferencePropertyResolveError:
                ## delete orphan chunk.
                chunk.delete()
                deleted += 1
            seen += 1
        if seen < step:
            memcache.delete('orphan_search_cursor')
            logging.warn('Orphan search reached end, starting over next time.')
        else:
            memcache.set('orphan_search_cursor',chunks.cursor())

        logging.info('Deleted %d orphan chunks from %d total.' % (deleted,seen))
        self.response.headers['Content-Type'] = 'text/html'
        self.response.out.write('Deleted %d orphan chunks from %d total.' % (deleted,seen))
|
||||
|
||||
logging.getLogger().setLevel(logging.DEBUG)

# Cleanup endpoints; the paths are deliberately obscure.
app = webapp2.WSGIApplication(
    [('/r3m0v3r', Remover),
     ('/r3m0v3rOrphans', RemoveOrphanDataChunks)],
    debug=False)
|
||||
|
|
@ -1,64 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Copyright 2011 Fanficdownloader team
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
|
||||
#from google.appengine.ext.webapp import util
|
||||
import webapp2
|
||||
#from google.appengine.ext import webapp
|
||||
from google.appengine.api import users
|
||||
from google.appengine.api import taskqueue
|
||||
from google.appengine.api import memcache
|
||||
|
||||
from ffstorage import *
|
||||
|
||||
class Tally(webapp2.RequestHandler):
    """Handles '/tally': dumps DownloadMeta records as CSV-style HTML,
    500 per request.

    A memcache-saved cursor lets repeated requests walk the whole
    table over time, restarting from the top once exhausted.
    """

    def get(self):
        logging.debug("Starting Tally")
        user = users.get_current_user()
        logging.debug("Working as user %s" % user)

        fics = DownloadMeta.all()

        # Resume from wherever the previous request stopped.
        cursor = memcache.get('tally_search_cursor')
        if cursor:
            fics.with_cursor(cursor)

        self.response.out.write('"user","url","name","title","author","format","failure","completed","date","version"<br/>')
        num = 0
        step = 500
        for record in fics.fetch(step):
            # NOTE(review): embedded double quotes in titles/failures are
            # not escaped, so this output is not strictly valid CSV.
            self.response.out.write('"%s","%s","%s","%s","%s","%s","%s","%s","%s","%s"<br/>' %
                                    (record.user,record.url,record.name,record.title,record.author,
                                     record.format,record.failure,record.completed,record.date,
                                     record.version))
            num += 1
        if num < step:
            memcache.delete('tally_search_cursor')
            logging.warn('Tally search reached end, starting over next time.')
        else:
            memcache.set('tally_search_cursor',fics.cursor())

        logging.info('Tallied %d fics.' % num)
        self.response.out.write('<br/>Tallied %d fics.<br/>' % num)
|
||||
|
||||
logging.getLogger().setLevel(logging.DEBUG)

# Single reporting endpoint.
app = webapp2.WSGIApplication([('/tally', Tally)], debug=False)
|
||||
Loading…
Reference in a new issue