import portage

from django.utils import timezone
from django.db.transaction import commit_on_success

from euscan import CONFIG, output
from euscan.scan import scan_upstream as euscan_scan_upstream

from djeuscan.processing import FakeLogger
from djeuscan.models import Package, Version, EuscanResult, VersionLog

class ScanUpstream(object):
    """Run euscan against packages and persist results in the database."""

    def __init__(self, logger=None):
        # Fall back to a no-op logger when none is supplied.
        self.logger = logger or FakeLogger()

    def scan(self, package):
        """Scan ``package`` upstream and store the findings.

        Returns the formatted euscan output dict, or an empty dict when
        the scan produced no usable metadata.
        """
        CONFIG["format"] = "dict"
        output.set_query(package)

        euscan_scan_upstream(package)

        out = output.get_formatted_output()
        out_json = output.get_formatted_output("json")

        # EAFP: a failed or empty scan leaves the metadata keys absent.
        try:
            metadata = out[package]["metadata"]
            cpv = metadata["cpv"]
            scan_time = metadata["scan_time"]
            ebuild = metadata["ebuild"]
        except KeyError:
            return {}

        # Persist package, versions and result atomically.
        with commit_on_success():
            package_obj = self.store_package(cpv)

            for found in out[package]["result"]:
                self.store_version(
                    package_obj, found["version"], " ".join(found["urls"])
                )

            self.store_result(package_obj, out_json, scan_time, ebuild)

        return out

    def store_result(self, package, formatted_log, scan_time, ebuild):
        """Replace any previous EuscanResult rows for ``package``."""
        # Only the most recent scan result is kept per package.
        EuscanResult.objects.filter(package=package).delete()

        result = EuscanResult(
            package=package,
            result=formatted_log,
            datetime=timezone.now(),
            scan_time=scan_time,
            ebuild=ebuild,
        )
        result.save()

    def store_package(self, cpv):
        """Return the Package row for ``cpv``, creating it if needed."""
        category, name, _version, _revision = portage.catpkgsplit(cpv)

        package_obj, created = Package.objects.get_or_create(
            category=category, name=name
        )

        if created:
            self.logger.info('+ [p] %s/%s' % (category, name))

        # Set all versions dead, then set found versions alive and
        # delete old versions
        Version.objects.filter(
            package=package_obj, packaged=False
        ).update(alive=False)

        return package_obj

    def store_version(self, package, ver, url):
        """Record upstream version ``ver`` of ``package`` found at ``url``."""
        version_obj, created = Version.objects.get_or_create(
            package=package, slot='', revision='r0', version=ver, overlay='',
            defaults={"alive": True, "urls": url, "packaged": False}
        )

        # If it's not a new version, just update the object and continue
        if not created:
            version_obj.alive = True
            version_obj.urls = url
            version_obj.packaged = False
            version_obj.save()
            return

        self.logger.info('+ [u] %s %s' % (version_obj, url))

        VersionLog.objects.create(
            package=package,
            action=VersionLog.VERSION_ADDED,
            slot='',
            revision='r0',
            version=ver,
            overlay=''
        )

        package.n_versions += 1
        package.save()
@commit_on_success
def purge_versions(logger=None):
    """Delete dead, non-packaged versions, logging each removal.

    A version is considered dead when a previous scan marked it
    ``alive=False`` and it is not packaged in the tree.
    """
    logger = logger or FakeLogger()

    # For each dead versions
    for version in Version.objects.filter(packaged=False, alive=False):
        VersionLog.objects.create(
            package=version.package,
            action=VersionLog.VERSION_REMOVED,
            slot=version.slot,
            revision=version.revision,
            version=version.version,
            overlay=version.overlay
        )

        version.package.n_versions -= 1
        version.package.save()

        logger.info('- [u] %s %s' % (version, version.urls))

    Version.objects.filter(packaged=False, alive=False).delete()
def scan_upstream(packages=None, purge_versions=False,
                  logger=None):
    """Scan packages upstream and optionally purge dead versions.

    Args:
        packages: iterable of Package objects or "category/name" strings;
            ``None`` means every Package in the database.
        purge_versions: when True, remove dead versions after the scan.
        logger: optional logger; defaults to a no-op FakeLogger.
    """
    logger = logger or FakeLogger()

    scan_handler = ScanUpstream(logger=logger)

    logger.info('Scanning upstream...')

    if packages is None:
        packages = Package.objects.all()

    for pkg in packages:
        if isinstance(pkg, Package):
            scan_handler.scan('%s/%s' % (pkg.category, pkg.name))
        else:
            scan_handler.scan(pkg)

    if purge_versions:
        # BUG FIX: the `purge_versions` parameter shadows the module-level
        # purge_versions() function, so the original code called a bool and
        # raised "TypeError: 'bool' object is not callable" whenever the
        # flag was True. Look the function up in the module namespace
        # explicitly; the parameter name is kept for backward compatibility.
        globals()['purge_versions'](logger=logger)

    logger.info('Done.')
|