Mirror of https://github.com/JimmXinu/FanFicFare.git, synced 2025-12-06 08:52:55 +01:00
Plugin BG Jobs: Remove old multi-process code
parent a82ef5dbae, commit 4a26dfdfff
4 changed files with 5 additions and 135 deletions
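In short: the 'single_proc_jobs' checkbox and preference are removed, FanFicFarePlugin now always dispatches the single-process do_download_worker_single job, and the old multi-process coordinator do_download_worker_multiproc is deleted outright.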
@@ -417,7 +417,6 @@ class ConfigWidget(QWidget):
         prefs['update_existing_only_from_email'] = self.imap_tab.update_existing_only_from_email.isChecked()
         prefs['download_from_email_immediately'] = self.imap_tab.download_from_email_immediately.isChecked()
 
-        prefs['single_proc_jobs'] = self.other_tab.single_proc_jobs.isChecked()
         prefs['site_split_jobs'] = self.other_tab.site_split_jobs.isChecked()
         prefs['reconsolidate_jobs'] = self.other_tab.reconsolidate_jobs.isChecked()
 
@@ -1309,11 +1308,6 @@ class OtherTab(QWidget):
         label.setWordWrap(True)
         groupl.addWidget(label)
 
-        self.single_proc_jobs = QCheckBox(_('Use new, single process background jobs'),self)
-        self.single_proc_jobs.setToolTip(_("Uncheck to go back to old multi-process BG jobs."))
-        self.single_proc_jobs.setChecked(prefs['single_proc_jobs'])
-        groupl.addWidget(self.single_proc_jobs)
-
         label = QLabel("<p>"+
                        _("Options with the new version:")+
                        "<ul>"+
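The removed widget followed the plugin's usual round trip: seed the checkbox from prefs when the tab is built, then write it back via isChecked() in the save path shown in the first hunk. A minimal standalone sketch of that round trip, using plain PyQt5 names and a dict as a stand-in for the plugin's prefs store (not the plugin's own tab classes):

    import sys
    from PyQt5.QtWidgets import QApplication, QCheckBox, QVBoxLayout, QWidget

    prefs = {'single_proc_jobs': True}  # stand-in for the plugin's prefs store

    app = QApplication(sys.argv)
    w = QWidget()
    groupl = QVBoxLayout(w)
    cb = QCheckBox('Use new, single process background jobs', w)
    cb.setToolTip('Uncheck to go back to old multi-process BG jobs.')
    cb.setChecked(prefs['single_proc_jobs'])  # seed widget state from prefs
    groupl.addWidget(cb)

    def save_settings():
        # write-back direction, as in the ConfigWidget hunk above
        prefs['single_proc_jobs'] = cb.isChecked()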
@@ -1810,15 +1810,9 @@ class FanFicFarePlugin(InterfaceAction):
         # get libs from plugin zip.
         options['plugin_path'] = self.interface_action_base_plugin.plugin_path
 
-        if prefs['single_proc_jobs']: ## YYY Single BG job
-            args = ['calibre_plugins.fanficfare_plugin.jobs',
-                    'do_download_worker_single',
-                    (site, book_list, options, merge)]
-        else: ## MultiBG Job split by site
-            cpus = self.gui.job_manager.server.pool_size
-            args = ['calibre_plugins.fanficfare_plugin.jobs',
-                    'do_download_worker_multiproc',
-                    (site, book_list, options, cpus, merge)]
+        args = ['calibre_plugins.fanficfare_plugin.jobs',
+                'do_download_worker_single',
+                (site, book_list, options, merge)]
         if site:
             desc = _('Download %s FanFiction Book(s) for %s') % (sum(1 for x in book_list if x['good']),site)
         else:
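The surviving args list is the job spec handed to calibre's background job machinery: an importable module path, the name of a function in that module, and the tuple of arguments to call it with. Purely as illustration, a hypothetical in-process resolver for such a spec (calibre's real runner executes it in a separate worker process; run_job_spec is not calibre API):

    import importlib

    def run_job_spec(spec):
        # spec = [module_path, function_name, args_tuple], as in the hunk above
        module_name, func_name, func_args = spec
        func = getattr(importlib.import_module(module_name), func_name)
        return func(*func_args)

    print(run_job_spec(['math', 'hypot', (3, 4)]))  # prints 5.0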
@@ -32,125 +32,6 @@ except NameError:
 #
 # ------------------------------------------------------------------------------
 
-def do_download_worker_multiproc(site,
-                                 book_list,
-                                 options,
-                                 cpus,
-                                 merge,
-                                 notification=lambda x,y:x):
-    '''
-    Coordinator job, to launch child jobs to do downloads.
-    This is run as a worker job in the background to keep the UI more
-    responsive and get around any memory leak issues as it will launch
-    a child job for each book as a worker process
-    '''
-    ## Now running one BG proc per site, which downloads for the same
-    ## site in serial.
-    logger.info("CPUs:%s"%cpus)
-    server = Server(pool_size=cpus)
-
-    logger.info(options['version'])
-
-    ## same info debug calibre prints out at startup. For when users
-    ## give me job output instead of debug log.
-    from calibre.debug import print_basic_debug_info
-    print_basic_debug_info(sys.stderr)
-
-    sites_lists = defaultdict(list)
-    [ sites_lists[x['site']].append(x) for x in book_list if x['good'] ]
-
-    totals = {}
-    # can't do direct assignment in list comprehension? I'm sure it
-    # makes sense to some pythonista.
-    # [ totals[x['url']]=0.0 for x in book_list if x['good'] ]
-    [ totals.update({x['url']:0.0}) for x in book_list if x['good'] ]
-    # logger.debug(sites_lists.keys())
-
-    # Queue all the jobs
-    jobs_running = 0
-    for site in sites_lists.keys():
-        site_list = sites_lists[site]
-        logger.info(_("Launch background process for site %s:")%site + "\n" +
-                    "\n".join([ x['url'] for x in site_list ]))
-        # logger.debug([ x['url'] for x in site_list])
-        args = ['calibre_plugins.fanficfare_plugin.jobs',
-                'do_download_site',
-                (site,site_list,options,merge)]
-        job = ParallelJob('arbitrary_n',
-                          "site:(%s)"%site,
-                          done=None,
-                          args=args)
-        job._site_list = site_list
-        job._processed = False
-        server.add_job(job)
-        jobs_running += 1
-
-    # This server is an arbitrary_n job, so there is a notifier available.
-    # Set the % complete to a small number to avoid the 'unavailable' indicator
-    notification(0.01, _('Downloading FanFiction Stories'))
-
-    # dequeue the job results as they arrive, saving the results
-    count = 0
-    while True:
-        job = server.changed_jobs_queue.get()
-        # logger.debug("job get job._processed:%s"%job._processed)
-        # A job can 'change' when it is not finished, for example if it
-        # produces a notification.
-        msg = None
-        try:
-            ## msg = book['url']
-            (percent,msg) = job.notifications.get_nowait()
-            # logger.debug("%s<-%s"%(percent,msg))
-            if percent == 10.0: # Only when signaling d/l done.
-                count += 1
-                totals[msg] = 1.0/len(totals)
-                # logger.info("Finished: %s"%msg)
-            else:
-                totals[msg] = percent/len(totals)
-            notification(max(0.01,sum(totals.values())), _('%(count)d of %(total)d stories finished downloading')%{'count':count,'total':len(totals)})
-        except Empty:
-            pass
-        # without update, is_finished will never be set. however, we
-        # do want to get all the notifications for status so we don't
-        # miss the 'done' ones.
-        job.update(consume_notifications=False)
-
-        # if not job._processed:
-        #     sleep(0.5)
-        ## Can have a race condition where job.is_finished before
-        ## notifications for all downloads have been processed.
-        ## Or even after the job has been finished.
-        # logger.debug("job.is_finished(%s) or job._processed(%s)"%(job.is_finished, job._processed))
-        if not job.is_finished:
-            continue
-
-        ## only process each job once. We can get more than one loop
-        ## after job.is_finished.
-        if not job._processed:
-            # sleep(1)
-            # A job really finished. Get the information.
-
-            ## This is where bg proc details end up in GUI log.
-            ## job.details is the whole debug log for each proc.
-            logger.info("\n\n" + ("="*80) + " " + job.details.replace('\r',''))
-            # logger.debug("Finished background process for site %s:\n%s"%(job._site_list[0]['site'],"\n".join([ x['url'] for x in job._site_list ])))
-            for b in job._site_list:
-                book_list.remove(b)
-            book_list.extend(job.result)
-            job._processed = True
-            jobs_running -= 1
-
-        ## Can't use individual count--I've seen stories all reported
-        ## finished before results of all jobs processed.
-        if jobs_running == 0:
-            ret_list = finish_download(book_list)
-            break
-
-    server.close()
-
-    # return the book list as the job result
-    return ret_list
-
 def do_download_worker_single(site,
                               book_list,
                               options,
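For reference, the deleted coordinator's shape was a plain fan-out/collect loop: queue one child job per site, then poll a shared queue and count jobs in until all have reported. A library-free sketch of that shape, with threading and queue.Queue standing in for calibre's Server and ParallelJob:

    import queue
    import threading

    def download_per_site(sites_lists, download_site):
        changed = queue.Queue()      # stands in for server.changed_jobs_queue
        for site, site_list in sites_lists.items():
            threading.Thread(
                target=lambda s=site, b=site_list: changed.put(download_site(s, b)),
            ).start()
        results, jobs_running = [], len(sites_lists)
        while jobs_running:          # dequeue job results as they arrive
            results.extend(changed.get())
            jobs_running -= 1
        return results

    # e.g. download_per_site({'siteA': ['u1', 'u2']}, lambda s, b: [(s, u) for u in b])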
@@ -197,7 +197,8 @@ default_prefs['auto_reject_from_email'] = False
 default_prefs['update_existing_only_from_email'] = False
 default_prefs['download_from_email_immediately'] = False
 
-default_prefs['single_proc_jobs'] = True
+
+#default_prefs['single_proc_jobs'] = True # setting and code removed
 default_prefs['site_split_jobs'] = True
 default_prefs['reconsolidate_jobs'] = True
 
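The commented-out default matters because prefs lookups fall back to default_prefs for keys the user never customized; with both the default and every prefs['single_proc_jobs'] read gone, the setting is fully retired. A toy sketch of that fallback behavior (assumed structure, not the plugin's actual prefs class):

    default_prefs = {'site_split_jobs': True, 'reconsolidate_jobs': True}

    class Prefs(dict):
        # reads fall back to the shipped defaults for never-set keys
        def __getitem__(self, key):
            return super().__getitem__(key) if key in self else default_prefs[key]

    prefs = Prefs()
    assert prefs['site_split_jobs'] is True    # falls back to the default
    prefs['site_split_jobs'] = False
    assert prefs['site_split_jobs'] is False   # user value wins once set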