2012-06-22 09:32:39 +02:00
|
|
|
"""
|
|
|
|
Celery tasks for djeuscan
|
|
|
|
"""
|
|
|
|
|
2012-08-11 15:09:42 +02:00
|
|
|
from datetime import datetime
|
|
|
|
|
2012-07-20 13:25:03 +02:00
|
|
|
from celery.task import task, group
|
2012-05-30 22:54:55 +02:00
|
|
|
|
2012-08-11 15:09:42 +02:00
|
|
|
#import portage
|
|
|
|
|
2012-06-26 18:31:35 +02:00
|
|
|
from django.conf import settings
|
2012-08-03 21:55:59 +02:00
|
|
|
from django.core.cache import cache
|
2012-08-11 15:09:42 +02:00
|
|
|
from django.template.loader import render_to_string
|
|
|
|
from django.core.mail import send_mail
|
|
|
|
from django.db.models import Q
|
2012-06-26 18:31:35 +02:00
|
|
|
|
2012-08-11 15:09:42 +02:00
|
|
|
from euscan.version import gentoo_unstable
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-08-11 15:09:42 +02:00
|
|
|
from djeuscan.models import Package, RefreshPackageQuery, UserProfile, \
|
|
|
|
VersionLog
|
2012-07-20 08:25:04 +02:00
|
|
|
from djeuscan.processing import scan, misc
|
2012-08-14 11:45:52 +02:00
|
|
|
from djeuscan.helpers import get_account_versionlogs, get_user_fav_infos
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-05-30 22:54:55 +02:00
|
|
|
|
2012-06-09 16:12:17 +02:00
|
|
|
class TaskFailedException(Exception):
    """Raised when a djeuscan task fails."""
|
|
|
|
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-07-20 08:25:04 +02:00
|
|
|
def group_one(task, seq, *args, **kwargs):
    """
    Create a group of tasks, each task handle one element of seq

    If the keyword argument ``attr_name`` is given, each element of seq
    is passed to its subtask as a keyword argument named by that value;
    otherwise the element is prepended to the positional arguments.
    All subtasks are immutable signatures.
    """
    # Pop the control keyword so it is never forwarded to the subtasks.
    attr_name = kwargs.pop("attr_name", None)

    tasks = []
    for elem in seq:
        if attr_name:
            kwargs[attr_name] = elem
            # dict(kwargs) snapshots the current value of kwargs[attr_name];
            # without the copy every subtask would share one mutable dict.
            tasks.append(task.subtask(args=list(args), kwargs=dict(kwargs),
                                      immutable=True))
        else:
            tasks.append(task.subtask(args=[elem] + list(args),
                                      kwargs=dict(kwargs), immutable=True))
    return group(tasks)
|
2012-06-08 14:21:11 +02:00
|
|
|
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-07-20 08:25:04 +02:00
|
|
|
def group_chunks(task, seq, n, *args, **kwargs):
    """
    Creates a group of tasks, each subtask has <n> elements to handle
    """
    subtasks = [
        task.subtask(args=[seq[start:start + n]] + list(args),
                     kwargs=kwargs, immutable=True)
        for start in xrange(0, len(seq), n)
    ]
    return group(subtasks)
|
2012-06-08 14:21:11 +02:00
|
|
|
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-05-30 22:54:55 +02:00
|
|
|
@task
def regen_rrds():
    """
    Regenerate the RRD statistics files.
    """
    misc.regen_rrds()
    return True
|
2012-05-30 22:54:55 +02:00
|
|
|
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-05-30 22:54:55 +02:00
|
|
|
@task
def update_counters(fast=False):
    """
    Update the package counters.

    ``fast`` is forwarded to misc.update_counters unchanged.
    """
    logger = update_counters.get_logger()
    logger.info("Updating counters (fast=%s)...", fast)

    misc.update_counters(fast=fast)

    logger.info("Done")
    return True
|
2012-05-30 22:54:55 +02:00
|
|
|
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-05-30 22:54:55 +02:00
|
|
|
@task
def scan_metadata(packages=None, category=None, populate=False):
    """
    Scans metadata for the given set of packages

    :param packages: packages to scan; empty/None means "no explicit list"
                     (forwarded as an empty list, as before).
    :param category: restrict the scan to one category.
    :param populate: forwarded to scan.scan_metadata.
    """
    # Avoid the shared mutable default argument ([]); normalize here so
    # scan.scan_metadata still receives a list, as it always did.
    if packages is None:
        packages = []

    logger = scan_metadata.get_logger()

    if packages:
        logger.info("Starting metadata scan for %d packages...",
                    len(packages))
    elif category:
        logger.info("Starting metadata scan for %s...",
                    category)
    else:
        logger.info("Starting metadata scan...")

    scan.scan_metadata(
        packages=packages,
        category=category,
        logger=logger,
        populate=populate,
    )
    return True
|
2012-05-30 22:54:55 +02:00
|
|
|
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-05-30 22:54:55 +02:00
|
|
|
@task
def scan_portage(packages=None, category=None,
                 no_log=False, purge_packages=False,
                 purge_versions=False, prefetch=False):
    """
    Scans portage for the given set of packages

    :param packages: packages to scan; empty/None means "no explicit list"
                     (forwarded as an empty list, as before).
    :param category: restrict the scan to one category.
    :param no_log, purge_packages, purge_versions, prefetch:
        forwarded to scan.scan_portage unchanged.
    """
    # Avoid the shared mutable default argument ([]); normalize here so
    # scan.scan_portage still receives a list, as it always did.
    if packages is None:
        packages = []

    logger = scan_portage.get_logger()

    if packages:
        logger.info("Starting portage scan for %d packages...",
                    len(packages))
    elif category:
        logger.info("Starting portage scan for %s...",
                    category)
    else:
        logger.info("Starting portage scan...")

    scan.scan_portage(
        packages=packages,
        category=category,
        no_log=no_log,
        purge_packages=purge_packages,
        purge_versions=purge_versions,
        prefetch=prefetch,
        logger=logger,
    )
    return True
|
2012-05-30 22:54:55 +02:00
|
|
|
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-06-22 09:32:39 +02:00
|
|
|
@task
def scan_upstream(packages=None, purge_versions=False):
    """
    Scans upstream for the given set of packages

    :param packages: packages to scan; empty/None means "no explicit list"
                     (forwarded as an empty list, as before).
    :param purge_versions: forwarded to scan.scan_upstream.
    """
    # Avoid the shared mutable default argument ([]); normalize here so
    # scan.scan_upstream still receives a list, as it always did.
    if packages is None:
        packages = []

    logger = scan_upstream.get_logger()

    # `if packages:` replaces the non-idiomatic `if len(packages):`.
    if packages:
        logger.info("Starting upstream scan subtask for %d packages...",
                    len(packages))
    else:
        logger.info("Starting upstream scan...")

    scan.scan_upstream(
        packages=packages,
        purge_versions=purge_versions,
        logger=logger,
    )
    return True
|
2012-05-30 22:54:55 +02:00
|
|
|
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-06-04 11:06:47 +02:00
|
|
|
@task
def update_portage_trees():
    """
    Update portage trees.
    """
    misc.update_portage_trees(logger=update_portage_trees.get_logger())
    return True
|
2012-06-04 11:06:47 +02:00
|
|
|
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-06-04 11:06:47 +02:00
|
|
|
@task
def update_portage(packages=None):
    """
    Full portage update pipeline: sync trees, rescan portage, rescan
    metadata, then refresh counters.

    NOTE(review): `packages` is currently unused -- the calls below always
    pass an empty list (i.e. a full scan). Kept for interface stability.
    """
    #categories = portage.settings.categories

    # Workaround for celery bug when chaining groups
    # (the steps are run sequentially inline instead of as a canvas chain;
    # see the disabled chain below).
    update_portage_trees()
    scan_portage(packages=[], purge_packages=True, purge_versions=True,
                 prefetch=True)
    scan_metadata(packages=[], populate=True)
    update_counters(fast=False)

    """ Currently broken
    update_portage_trees()
    scan_metadata(packages=None, populate=True)
    (
        group_one(scan_portage, categories,
                  attr_name="category", purge_packages=True,
                  purge_versions=True, prefetch=True) |
        group_one(scan_metadata, categories,
                  attr_name="category") |
        update_counters.si(fast=True)
    )()
    """
    return True
|
2012-06-04 11:06:47 +02:00
|
|
|
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-07-20 08:25:04 +02:00
|
|
|
@task
def update_upstream():
    """
    Run a full upstream scan, then refresh counters and trigger the
    per-scan notification emails, as a celery chain.
    """
    chunk_size = settings.TASKS_UPSTREAM_GROUPS
    if chunk_size >= 1:
        # Split the work into subtasks of <chunk_size> packages each.
        all_packages = Package.objects.all().order_by('pk')  # .order_by('?') ?
        upstream_step = group_chunks(scan_upstream, all_packages,
                                     chunk_size, purge_versions=True)
    else:
        # One subtask handles every package.
        upstream_step = scan_upstream.si(purge_versions=True)

    pipeline = (
        upstream_step |
        update_counters.si(fast=False) |
        send_update_email.si()
    )
    pipeline()
    return True
|
2012-06-04 11:06:47 +02:00
|
|
|
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-06-22 11:40:40 +02:00
|
|
|
@task
def scan_package(package):
    """
    Fully rescan one package: portage, metadata, then upstream.
    """
    logger = scan_package.get_logger()
    logger.info("Scanning package %s", package)

    scan_portage([package], purge_packages=True, purge_versions=True)
    scan_metadata([package])
    scan_upstream([package], purge_versions=True)
    return True
|
2012-06-22 11:40:40 +02:00
|
|
|
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-07-20 08:25:04 +02:00
|
|
|
@task(rate_limit="1/m")
def scan_package_user(package):
    """
    Rate-limited wrapper around scan_package for user-requested refreshes.
    """
    scan_package(package)
    return True
|
2012-07-07 15:13:59 +02:00
|
|
|
|
2012-07-20 13:25:03 +02:00
|
|
|
|
2012-10-10 09:36:25 +02:00
|
|
|
@task(rate_limit="1/m")
def consume_refresh_queue(locked=False):
    """
    Satisfies user requests for package refreshing, runs every minute

    Pops the highest-priority entry from RefreshPackageQuery, schedules a
    scan for its package, and reschedules itself in 60s while entries
    remain.

    NOTE(review): `locked` is never read in this body -- it is only set to
    True on the self-rescheduled call; confirm whether a caller inspects it.
    """
    logger = consume_refresh_queue.get_logger()
    logger.info('Consuming package refresh request queue...')

    try:
        # Highest priority first; IndexError on [0] means the queue is empty.
        query = RefreshPackageQuery.objects.all().order_by('-priority')[0]
        pkg = query.package
        # Delete the queue entry before dispatching so it is not picked twice.
        query.delete()
        scan_package_user.delay(pkg)
        logger.info('Selected: %s' % pkg)
    except IndexError:
        # Empty queue: nothing to do, and no rescheduling.
        return

    if RefreshPackageQuery.objects.count():
        # More work queued: re-run in a minute (stays under the rate limit).
        logger.info('Restarting myself in 60s')
        consume_refresh_queue.apply_async(
            kwargs={'locked': True}, countdown=60
        )
    return True
|
2012-08-11 15:09:42 +02:00
|
|
|
|
|
|
|
@task(max_retries=10, default_retry_delay=10 * 60)
def send_user_email(address, subject, text):
    """
    Send a plain-text email to a single address.

    On any send failure the task is retried (up to max_retries times,
    10 minutes apart) via celery's retry mechanism.
    """
    try:
        send_mail(
            subject, text, settings.DEFAULT_FROM_EMAIL, [address],
            fail_silently=False
        )
    # `except ... as` replaces the deprecated `except Exception, exc` form.
    except Exception as exc:
        # retry() re-raises with the original exception attached.
        raise send_user_email.retry(exc=exc)
    return True
|
2012-08-11 15:09:42 +02:00
|
|
|
|
|
|
|
@task
def process_emails(profiles, only_if_vlogs=False):
    """
    Render and queue a notification email for each profile.

    For every profile: collect its version logs newer than the last email,
    filter them per the profile's preferences, render the email template,
    dispatch it asynchronously, and record the send time on the profile.

    :param profiles: iterable of UserProfile instances to process.
    :param only_if_vlogs: skip profiles with no matching version logs.
    """
    for profile in profiles:
        now = datetime.now()
        user = profile.user

        vlogs = get_account_versionlogs(profile)
        vlogs = vlogs.filter(
            datetime__gt=profile.last_email,
            overlay="",  # only upstream versions
            action=VersionLog.VERSION_ADDED,  # only adds
        )
        if profile.email_ignore_pre:
            # Drop unstable ("pre") versions entirely.
            vlogs = vlogs.exclude(vtype__in=gentoo_unstable)
        if profile.email_ignore_pre_if_stable:
            # Drop unstable versions only for packages whose last gentoo
            # version is stable (the negated Q restricts the exclusion).
            vlogs = vlogs.exclude(
                ~Q(package__last_version_gentoo__vtype__in=gentoo_unstable),
                vtype__in=gentoo_unstable
            )

        if only_if_vlogs and not vlogs.count():
            continue

        vlogs = vlogs.order_by("-datetime")

        # Template context: favourite-package info plus the user and logs.
        infos = get_user_fav_infos(user)
        infos["user"] = user
        infos["vlogs"] = vlogs

        mail_text = render_to_string(
            "euscan/accounts/euscan_email.txt",
            infos
        )

        # Sending is delegated to the retrying send_user_email task.
        send_user_email.delay(
            user.email, "euscan updates - %s" % str(now.date()), mail_text
        )

        # `now` was captured before filtering, so logs created during this
        # iteration will be picked up by the next run.
        profile.last_email = now
        profile.save(force_update=True)
    return True
|
2012-08-11 15:09:42 +02:00
|
|
|
|
|
|
|
@task
def send_update_email():
    """
    Queue notification emails for users subscribed to per-scan updates,
    skipping those with nothing new to report.
    """
    recipients = UserProfile.objects.filter(
        email_every=UserProfile.EMAIL_SCAN,
        email_activated=True
    )
    group_chunks(process_emails, recipients,
                 settings.TASKS_EMAIL_GROUPS, only_if_vlogs=True)()
    return True
|
2012-08-11 15:09:42 +02:00
|
|
|
|
|
|
|
|
|
|
|
@task
def send_weekly_email():
    """
    Queue notification emails for users subscribed to weekly updates.
    """
    recipients = UserProfile.objects.filter(
        email_every=UserProfile.EMAIL_WEEKLY,
        email_activated=True
    )
    group_chunks(process_emails, recipients, settings.TASKS_EMAIL_GROUPS)()
    return True
|
2012-08-11 15:09:42 +02:00
|
|
|
|
|
|
|
|
|
|
|
@task
def send_monthly_email():
    """
    Queue notification emails for users subscribed to monthly updates.
    """
    recipients = UserProfile.objects.filter(
        email_every=UserProfile.EMAIL_MONTHLY,
        email_activated=True
    )
    group_chunks(process_emails, recipients, settings.TASKS_EMAIL_GROUPS)()
    return True
|
2012-08-11 15:09:42 +02:00
|
|
|
|
2012-06-08 14:21:11 +02:00
|
|
|
# Tasks that may be triggered manually -- presumably from an admin view;
# confirm against the consumer of this list. Note process_emails and the
# queue/email helpers are deliberately absent.
admin_tasks = [
    regen_rrds,
    update_counters,
    scan_metadata,
    scan_portage,
    scan_upstream,
    update_portage_trees,
    update_portage,
    update_upstream,
    scan_package,
    send_update_email,
    send_weekly_email,
    send_monthly_email,
]
|