Now I remember why cleanup was limited — the task timeout. Reverted to fetching 100
records each time, but the query still filters by time rather than by day; the orphan
search will have to become a queue task — unscheduled for now.
This commit is contained in:
Jim Miller 2011-06-03 20:12:04 -05:00
parent f2283a2052
commit 152ee61ed4
2 changed files with 12 additions and 18 deletions

View file

@@ -2,7 +2,3 @@ cron:
- description: cleanup job
url: /r3m0v3r
schedule: every 2 hours
- description: orphan cleanup job
url: /r3m0v3rOrphans
schedule: every 48 hours

View file

@@ -42,20 +42,18 @@ class Remover(webapp.RequestHandler):
fics = DownloadMeta.all()
fics.filter("date <",theDate).order("date")
num = 0
while( True ) :
results = fics.fetch(100)
if not results:
self.response.out.write('Finished<br>')
break
logging.debug([x.name for x in results])
results = fics.fetch(100)
if not results:
self.response.out.write('Finished<br>')
break
logging.debug([x.name for x in results])
for d in results:
d.delete()
for c in d.data_chunks:
c.delete()
num += 1
logging.debug('Delete '+d.url)
for d in results:
d.delete()
for c in d.data_chunks:
c.delete()
num += 1
logging.debug('Delete '+d.url)
logging.info('Deleted instances: %d' % num)
self.response.out.write('Deleted instances: %d<br>' % num)
@@ -70,7 +68,7 @@ class RemoveOrphanDataChunks(webapp.RequestHandler):
deleted = 0
num = 0
step=2
step=100
while( True ) :
results = chunks.fetch(limit=step,offset=num-deleted)
if not results: