# Scrape residue from the original hosting page, kept as comments for provenance:
# commit 2544af2e32
# - strip '_task' end - remove "launcher" functions, if we want complicated
#   starter functions we will put them somewhere else later. - now, everything
#   is asynchroneous, maybe we could switch from group_one()/group_chunks()
#   to .chunks() someday...
# Signed-off-by: Corentin Chary <corentin.chary@gmail.com>
# 222 lines, 5.3 KiB, Python
"""
|
|
Celery tasks for djeuscan
|
|
"""
|
|
|
|
from itertools import islice
|
|
|
|
from celery.task import task, group, chord
|
|
|
|
from django.conf import settings
|
|
|
|
from djeuscan.models import Package, RefreshPackageQuery
|
|
from djeuscan.processing import scan, misc
|
|
from djeuscan.utils import queryset_iterator
|
|
|
|
class TaskFailedException(Exception):
    """Raised when a djeuscan task fails."""
|
|
|
|
def group_one(task, seq, *args, **kwargs):
    """
    Create a group of tasks, one subtask per element of ``seq``.

    :param task: the celery task whose ``subtask`` is instantiated.
    :param seq: iterable of elements; each becomes the subtask's first
                positional argument.
    :param args: extra positional arguments appended after the element.
    :param kwargs: keyword arguments forwarded to every subtask.
    :returns: a celery ``group`` of the subtasks.

    Bug fix: the previous code did ``for i in seq: ... seq[i]`` — it
    iterated over the *elements* and then used each element as an index.
    For the list of category name strings passed by ``update_portage``
    that raised ``TypeError: list indices must be integers``.  Each
    element is now passed through directly.
    """
    extra = list(args)
    tasks = [task.subtask(args=[item] + extra, kwargs=kwargs)
             for item in seq]
    return group(tasks)
|
|
|
|
def group_chunks(task, seq, n, *args, **kwargs):
    """
    Create a group of tasks where each subtask receives a slice of at
    most ``n`` consecutive elements of ``seq`` as its first argument.

    :param task: the celery task whose ``subtask`` is instantiated.
    :param seq: sliceable sequence to split into chunks.
    :param n: maximum chunk size.
    :returns: a celery ``group`` of the chunked subtasks.
    """
    extra = list(args)
    subtasks = [
        task.subtask(args=[seq[start:start + n]] + extra, kwargs=kwargs)
        for start in xrange(0, len(seq), n)
    ]
    return group(subtasks)
|
|
|
|
@task
def regen_rrds():
    """Regenerate the RRD graphs and report success."""
    misc.regen_rrds()
    return True
|
|
|
|
@task
def update_counters(fast=False):
    """
    Update the djeuscan counters.

    :param fast: forwarded to ``misc.update_counters``; presumably skips
                 the expensive part of the update — TODO confirm there.
    """
    log = update_counters.get_logger()
    log.info("Updating counters (fast=%s)...", fast)
    misc.update_counters(fast=fast)
    log.info("Done")
    return True
|
|
|
|
@task
def scan_metadata(packages=None, category=None):
    """
    Scan metadata for the given set of packages.

    :param packages: iterable of packages to scan; empty/None means scan
                     ``category`` (or everything if that is None too).
    :param category: portage category name to scan.
    :returns: True on completion.

    Fix: the default was the mutable ``[]`` (shared across calls, a
    classic Python pitfall).  It is now ``None`` and normalised below,
    so downstream code still receives a list exactly as before.
    """
    logger = scan_metadata.get_logger()
    packages = [] if packages is None else packages

    if packages:
        logger.info("Starting metadata scan for %d packages...",
                    len(packages))
    elif category:
        logger.info("Starting metadata scan for %s...",
                    category)
    else:
        logger.info("Starting metadata scan...")

    scan.scan_metadata(
        packages=packages,
        category=category,
        logger=logger,
    )
    return True
|
|
|
|
@task
def scan_portage(packages=[], category=None,
                 no_log=False, purge_packages=False,
                 purge_versions=False, prefetch=False):
    """
    Scan portage for the given set of packages.

    :param packages: iterable of packages to scan; empty means scan
                     ``category`` (or everything if that is None too).
                     NOTE(review): the mutable ``[]`` default is kept
                     for byte-compatible introspection, but it is never
                     mutated here so it is harmless; it is normalised
                     to a fresh list below as a defensive measure.
    :param category: portage category name to scan.
    :param no_log: forwarded to ``scan.scan_portage``.
    :param purge_packages: forwarded; presumably drops stale packages.
    :param purge_versions: forwarded; presumably drops stale versions.
    :param prefetch: forwarded to ``scan.scan_portage``.
    :returns: True on completion.
    """
    logger = scan_portage.get_logger()
    # Defensive copy-free normalisation: never forward the shared default.
    packages = list(packages) if packages else []

    if packages:
        logger.info("Starting portage scan for %d packages...",
                    len(packages))
    elif category:
        logger.info("Starting portage scan for %s...",
                    category)
    else:
        logger.info("Starting portage scan...")

    scan.scan_portage(
        packages=packages,
        category=category,
        no_log=no_log,
        purge_packages=purge_packages,
        purge_versions=purge_versions,
        prefetch=prefetch,
        logger=logger,
    )
    return True
|
|
|
|
@task
def scan_upstream(packages=None, purge_versions=False):
    """
    Scan upstream for the given set of packages.

    :param packages: iterable of packages to scan; empty/None means all.
    :param purge_versions: forwarded to ``scan.scan_upstream``;
                           presumably drops stale upstream versions.
    :returns: True on completion.

    Fixes:
    - the "Starting upstream scan..." log call passed a stray
      ``len(packages)`` argument although its format string has no
      placeholder, which made the logging %-formatting fail;
    - mutable default ``packages=[]`` replaced by ``None`` and
      normalised below (behaviour unchanged);
    - ``if len(packages):`` replaced by the idiomatic truthiness test.
    """
    logger = scan_upstream.get_logger()
    packages = [] if packages is None else packages

    if packages:
        logger.info("Starting upstream scan subtask for %d packages...",
                    len(packages))
    else:
        logger.info("Starting upstream scan...")

    scan.scan_upstream(
        packages=packages,
        purge_versions=purge_versions,
        logger=logger,
    )
    return True
|
|
|
|
@task
def update_portage_trees():
    """Update the portage trees, logging through the task logger."""
    misc.update_portage_trees(logger=update_portage_trees.get_logger())
    return True
|
|
|
|
@task
def update_portage(packages=None):
    """
    Full portage refresh pipeline: sync the trees, rescan portage
    (purging stale data), rescan metadata per category, then rebuild
    the counters.

    :param packages: accepted for interface compatibility but unused —
                     the pipeline always scans everything.
    :returns: True once the chain has been dispatched (the chain itself
              runs asynchronously).

    Fix: ``portage`` was referenced but never imported anywhere in this
    module, so this task died with a NameError the moment it ran.  The
    dead commented-out ``scan_metadata.si()`` chain element was removed.
    """
    # Imported locally: the Gentoo ``portage`` system package is only
    # needed by this task and may be absent in dev environments.
    import portage

    (
        update_portage_trees.s() |
        scan_portage.si(purge_packages=True, purge_versions=True,
                        prefetch=True) |
        group_one(scan_metadata, portage.settings.categories) |
        update_counters.si(fast=False)
    )()
    return True
|
|
|
|
@task
def update_upstream():
    """
    Launch an upstream scan of every package, then refresh the counters.

    When ``settings.TASKS_UPSTREAM_GROUPS`` is >= 1 the packages are
    split across subtasks of that many packages each; otherwise a single
    subtask scans everything.
    """
    chunk_size = settings.TASKS_UPSTREAM_GROUPS
    if chunk_size >= 1:
        scan_upstream_sub = group_chunks(
            scan_upstream,
            Package.objects.all(),
            chunk_size,
            purge_versions=True,
        )
    else:
        scan_upstream_sub = scan_upstream.si(purge_versions=True)

    (scan_upstream_sub | update_counters.si(fast=False))()
    return True
|
|
|
|
@task
def scan_package(package):
    """
    Synchronously refresh everything about one package: portage data
    (purging stale packages/versions), then metadata, then upstream.
    """
    scan_portage([package], purge_packages=True, purge_versions=True)
    scan_metadata([package])
    scan_upstream([package])
    return True
|
|
|
|
@task(rate_limit="1/m")
def scan_package_user(package):
    """Rate-limited wrapper around ``scan_package`` for user requests."""
    scan_package(package)
    return True
|
|
|
|
@task
def consume_refresh_package_request():
    """
    Satisfy user requests for package refreshing; runs every minute.

    Pops the highest-priority pending query (if any), deletes it, and
    queues a rate-limited scan of its package.
    """
    pending = RefreshPackageQuery.objects.all().order_by('-priority')
    try:
        query = pending[0]
    except IndexError:
        # Nothing queued — nothing to do.
        return

    package = query.package
    query.delete()
    scan_package_user.delay(package)
|
|
|
|
# Tasks exposed to the admin interface (in display order).
admin_tasks = [
    regen_rrds,
    update_counters,
    scan_metadata,
    scan_portage,
    scan_upstream,
    update_portage_trees,
    update_portage,
    update_upstream,
    scan_package,
]
|
|
|
|
""" Chunk helpers (chunks can't use keyword arguments) """
|
|
@task
def scan_metadata_category(category):
    """
    Chunk-friendly helper: run ``scan_metadata`` on a whole category
    (celery chunks cannot pass keyword arguments).
    """
    scan_metadata(category=category)
    return True
|
|
|
|
@task
def scan_upstream_purge(*packages):
    """
    Chunk-friendly helper: run ``scan_upstream`` with
    ``purge_versions=True`` (celery chunks cannot pass keyword
    arguments).
    """
    scan_upstream(packages, purge_versions=True)
    return True
|