"""Upstream scanning for djeuscan: run euscan on packages and record the
discovered upstream versions and scan results in the database."""

import portage

from django.utils import timezone
from django.db.transaction import commit_on_success

from euscan import CONFIG, output
from euscan.scan import scan_upstream as euscan_scan_upstream

from djeuscan.processing import FakeLogger
from djeuscan.models import Package, Version, EuscanResult, VersionLog


class ScanUpstream(object):
    def __init__(self, logger=None, purge_versions=False):
        self.logger = logger or FakeLogger()
        self.purge_versions = purge_versions

        # Unpackaged versions already stored for the scanned packages, and
        # the versions actually seen during this run; the difference is
        # purged afterwards by purge_old_versions().
        self._versions = set()
        self._versions_seen = set()

    def scan(self, package):
        CONFIG["format"] = "dict"
        output.clean()
        output.set_query(package)

        # Run euscan; results are collected by the shared output object
        euscan_scan_upstream(package)

        out = output.get_formatted_output()
        out_json = output.get_formatted_output("json")

        try:
            cpv = out[package]["metadata"]["cpv"]
            scan_time = out[package]["metadata"]["scan_time"]
            ebuild = out[package]["metadata"]["ebuild"]
        except KeyError:
            self.logger.error(
                "Error while scanning upstream for package %s!",
                package
            )
            self.logger.debug(
                "Error %s",
                out_json
            )
            return {}

        obj = self.store_package(cpv)

        for res in out[package]["result"]:
            self.store_version(
                obj,
                res["version"],
                " ".join(res["urls"]),
                res["type"],
                res["handler"],
                res["confidence"],
            )

        self.store_result(obj, out_json, scan_time, ebuild)

    def store_result(self, package, formatted_log, scan_time, ebuild):
        # Remove previous logs
        EuscanResult.objects.filter(package=package).delete()

        obj = EuscanResult()
        obj.package = package
        obj.result = formatted_log
        obj.datetime = timezone.now()
        obj.scan_time = scan_time
        obj.ebuild = ebuild
        obj.save()

    def store_package(self, cpv):
        # catpkgsplit() yields (category, package, version, revision)
        cat, pkg, ver, rev = portage.catpkgsplit(cpv)

        obj, created = Package.objects.get_or_create(category=cat, name=pkg)

        if created:
            self.logger.info('+ [p] %s/%s' % (cat, pkg))

        # Remember the unpackaged versions currently stored for this package
        versions = Version.objects.filter(
            package=obj, packaged=False
        )
        for version in versions:
            self._versions.add(version)

        return obj

    def store_version(self, package, ver, url, version_type, handler,
                      confidence):
        obj, created = Version.objects.get_or_create(
            package=package,
            revision='r0',
            version=ver,
            overlay='',
            defaults={"slot": '', "urls": url, "packaged": False,
                      "vtype": version_type, "handler": handler,
                      "confidence": confidence}
        )

        if not created:
            obj.slot = ''
            obj.urls = url
            obj.packaged = False
            obj.save()

        self._versions_seen.add(obj)

        # Nothing left to do for an existing version; it was updated above
        if not created:
            return

        self.logger.info('+ [u] %s %s' % (obj, url))

        VersionLog.objects.create(
            package=package,
            action=VersionLog.VERSION_ADDED,
            slot='',
            revision='r0',
            version=ver,
            overlay='',
            vtype=version_type,
        )

        package.n_versions += 1
        package.save()

    def purge_old_versions(self):
        if not self.purge_versions:
            return

        # Unpackaged versions that were in the database but not seen during
        # this scan have disappeared upstream: log and delete them.
        versions = self._versions.difference(self._versions_seen)
        for version in versions:
            if version.packaged:
                continue  # Not our job

            VersionLog.objects.create(
                package=version.package,
                action=VersionLog.VERSION_REMOVED,
                slot=version.slot,
                revision=version.revision,
                version=version.version,
                overlay=version.overlay,
                vtype=version.vtype,
            )

            version.package.n_versions -= 1
            version.package.save()

            self.logger.info('- [u] %s %s' % (version, version.urls))

            version.delete()


# Run the whole scan in a single database transaction (committed on success)
@commit_on_success
def scan_upstream(packages=None, purge_versions=False,
                  logger=None):
    logger = logger or FakeLogger()

    scan_handler = ScanUpstream(logger=logger, purge_versions=purge_versions)

    logger.info('Scanning upstream...')

    if not packages:
        packages = Package.objects.all()

    for pkg in packages:
        # Accept both Package instances and plain "category/name" strings
        try:
            package = '%s/%s' % (pkg.category, pkg.name)
        except AttributeError:
            package = pkg

        logger.info('Scanning %s' % package)
        scan_handler.scan(package)

    scan_handler.purge_old_versions()

    logger.info('Done.')

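# Minimal usage sketch: scan_upstream() accepts either Package instances or
# plain "category/name" strings. The import path and package atom below are
# illustrative assumptions only, not taken from this module.
#
#     from djeuscan.processing.scan_upstream import scan_upstream
#     scan_upstream(packages=["app-editors/vim"], purge_versions=True)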