euscanwww: bug found, celery timeouts on join. Commented out

Signed-off-by: volpino <fox91@anche.no>
This commit is contained in:
volpino 2012-06-09 16:12:17 +02:00
parent 9cddc82808
commit 4246c544ad
2 changed files with 17 additions and 3 deletions

View File

@@ -18,6 +18,13 @@ from djeuscan.management.commands.scan_upstream import ScanUpstream, \
    purge_versions as scan_upstream_purge
class TaskFailedException(Exception):
"""
Exception for failed tasks
"""
pass
def _launch_command(cmd):
    fp = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
@@ -38,7 +45,8 @@ def _run_in_chunks(task, iterable, n=32):
            for args in chunk
        ])
        result = job.apply_async()
        output.extend(list(result.join()))
        # TODO: understand why this causes timeout
        #output.extend(list(result.join(timeout=3600)))
    return output
@@ -114,7 +122,10 @@ def scan_upstream_task(query):
    logger.info("Starting upstream scanning for package %s ...", query)
    scan_upstream = ScanUpstream()
    return scan_upstream.scan(query)
    result = scan_upstream.scan(query)
    if not result:
        raise TaskFailedException("Couldn't scan upstream for this package")
    return result
@task
@@ -127,7 +138,8 @@ def scan_upstream_all_task(purge=False):
    output = _run_in_chunks(
        scan_upstream_task,
        [('%s/%s' % (pkg.category, pkg.name), )
         for pkg in Package.objects.all()]
         for pkg in Package.objects.all()],
        n=16
    )
    if purge:

View File

@@ -213,6 +213,8 @@ import djcelery
djcelery.setup_loader()
BROKER_URL = "amqp://guest:guest@localhost:5672//"
CELERY_RESULT_BACKEND = "amqp"
BROKER_CONNECTION_TIMEOUT = 600
CELERYD_CONCURRENCY = 4
try: