euscan/euscanwww: Removing unused imports, fixing some typos and PEP8 cleaning

Signed-off-by: volpino <fox91@anche.no>
This commit is contained in:
volpino 2012-08-07 09:39:49 +02:00
parent b03541c97d
commit 952d9505b2
18 changed files with 105 additions and 49 deletions

View File

@ -299,9 +299,9 @@ def main():
except Exception as err:
import traceback
print ('-'*60)
print ('-' * 60)
traceback.print_exc(file=sys.stderr)
print ('-'*60)
print ('-' * 60)
output.eerror('%s: %s' % (query, str(err)))
exit_helper(1)

View File

@ -178,7 +178,8 @@ def patch_metadata(package, watch_data, diff=False):
for opt in opts.split(","):
opt_name, opt_value = opt.split("=", 1)
if opt_name in valid:
if opt_name == "uversionmangle": opt_name = "versionmangle"
if opt_name == "uversionmangle":
opt_name = "versionmangle"
cleaned_opts.append('%s="%s"' % (opt_name, opt_value))
opts = " ".join(cleaned_opts)

View File

@ -43,8 +43,8 @@ class ScanMetadata(object):
pkg = self.get_package(query)
try:
version = Version.objects.filter(package=pkg).values('metadata_path')\
.order_by('version', 'revision')[0]
version = Version.objects.filter(package=pkg).\
values('metadata_path').order_by('version', 'revision')[0]
except IndexError:
return pkg, None

View File

@ -35,9 +35,11 @@ def group_one(task, seq, *args, **kwargs):
for elem in seq:
if attr_name:
kwargs[attr_name] = elem
tasks.append(task.subtask(args=list(args), kwargs=dict(kwargs), immutable=True))
tasks.append(task.subtask(args=list(args), kwargs=dict(kwargs),
immutable=True))
else:
tasks.append(task.subtask(args=[elem] + list(args), kwargs=dict(kwargs), immutable=True))
tasks.append(task.subtask(args=[elem] + list(args),
kwargs=dict(kwargs), immutable=True))
return group(tasks)
@ -49,7 +51,8 @@ def group_chunks(task, seq, n, *args, **kwargs):
tasks = []
for i in xrange(0, len(seq), n):
tasks.append(
task.subtask(args=[seq[i:i + n]] + list(args), kwargs=kwargs, immutable=True)
task.subtask(args=[seq[i:i + n]] + list(args), kwargs=kwargs,
immutable=True)
)
return group(tasks)
@ -169,7 +172,8 @@ def update_portage(packages=None):
""" Workaround for celery bug when chaining groups """
update_portage_trees()
scan_portage(packages=[], purge_packages=True, purge_versions=True, prefetch=True)
scan_portage(packages=[], purge_packages=True, purge_versions=True,
prefetch=True)
scan_metadata(packages=[], populate=True)
update_counters(fast=False)
@ -250,7 +254,9 @@ def consume_refresh_queue(locked=False):
if RefreshPackageQuery.objects.count():
logger.info('Restarting myself in 60s')
lock()
consume_refresh_queue.apply_async(kwargs={'locked':True}, countdown=60)
consume_refresh_queue.apply_async(
kwargs={'locked': True}, countdown=60
)
admin_tasks = [
regen_rrds,

View File

@ -399,9 +399,14 @@ def accounts_index(request):
maintainers = sorted(get_account_maintainers(request.user),
key=upstream_k, reverse=True)
m_upstream = sum([upstream_k(c) for c in maintainers])
packages = sorted(get_profile(user).packages.all(),
key=lambda p: p.n_versions - p.n_packaged - p.n_overlay, reverse=True)
p_upstream = sum([c.n_versions - c.n_packaged - c.n_overlay for c in packages])
packages = sorted(
get_profile(user).packages.all(),
key=lambda p: p.n_versions - p.n_packaged - p.n_overlay,
reverse=True
)
p_upstream = sum(
[c.n_versions - c.n_packaged - c.n_overlay for c in packages]
)
return {
"categories": categories, "categories_upstream": c_upstream,
"herds": herds, "herds_upstream": h_upstream,

View File

@ -1,12 +1,12 @@
import os, sys
import os
import sys
import pkgutil
from euscan import CONFIG, output
import euscan.mangling
from gentoolkit.metadata import MetaData
handlers = {'package' : [], 'url' : [], 'all' : {}}
handlers = {'package': [], 'url': [], 'all': {}}
# autoimport all modules in this directory and append them to handlers list
for loader, module_name, is_pkg in pkgutil.walk_packages(__path__):
@ -19,6 +19,7 @@ for loader, module_name, is_pkg in pkgutil.walk_packages(__path__):
handlers['package'].append(module)
handlers['all'][module.HANDLER_NAME] = module
# sort handlers by priority
def sort_handlers(handlers):
return sorted(
@ -30,6 +31,7 @@ def sort_handlers(handlers):
handlers['package'] = sort_handlers(handlers['package'])
handlers['url'] = sort_handlers(handlers['url'])
def find_best_handler(kind, pkg, *args):
"""
Find the best handler for the given package
@ -39,6 +41,7 @@ def find_best_handler(kind, pkg, *args):
return handler
return None
def find_handlers(kind, names):
ret = []
@ -49,12 +52,14 @@ def find_handlers(kind, names):
return ret
def get_metadata(pkg):
metadata = {}
pkg_metadata = None
meta_override = os.path.join('metadata', pkg.category, pkg.name, 'metadata.xml')
meta_override = os.path.join('metadata', pkg.category, pkg.name,
'metadata.xml')
try:
if os.path.exists(meta_override):
@ -99,10 +104,11 @@ def get_metadata(pkg):
if not metadata[handler][i]['data']:
metadata[handler][i]['data'] = node.text
else:
metadata[handler] = [{'type' : handler, 'data' : node.text }]
metadata[handler] = [{'type': handler, 'data': node.text}]
return metadata
def scan_pkg(pkg_handler, pkg, options, on_progress=None):
versions = []
@ -117,6 +123,7 @@ def scan_pkg(pkg_handler, pkg, options, on_progress=None):
return versions
def scan_url(pkg, urls, options, on_progress=None):
versions = []
@ -158,6 +165,7 @@ def scan_url(pkg, urls, options, on_progress=None):
return versions
def scan(pkg, urls, on_progress=None):
"""
Scans upstream for the given package.
@ -174,7 +182,8 @@ def scan(pkg, urls, on_progress=None):
pkg_handlers = find_handlers('package', metadata.keys())
if not pkg_handlers:
pkg_handler = find_best_handler('package', pkg)
if pkg_handler: pkg_handlers = [pkg_handler]
if pkg_handler:
pkg_handlers = [pkg_handler]
for pkg_handler in pkg_handlers:
options = metadata.get(pkg_handler.HANDLER_NAME, [{}])
@ -185,6 +194,7 @@ def scan(pkg, urls, on_progress=None):
return versions
def mangle(kind, name, string):
if name not in handlers['all']:
return None
@ -193,8 +203,10 @@ def mangle(kind, name, string):
return None
return getattr(handler, 'mangle_%s' % kind)(string)
def mangle_url(name, string):
    """Shortcut for mangle() with the kind fixed to 'url'."""
    kind = 'url'
    return mangle(kind, name, string)
def mangle_version(name, string):
    """Shortcut for mangle() with the kind fixed to 'version'."""
    kind = 'version'
    return mangle(kind, name, string)

View File

@ -11,9 +11,11 @@ PRIORITY = 90
_cpan_package_name_re = re.compile("mirror://cpan/authors/.*/([^/.]*).*")
def can_handle(pkg, url=None):
    """Tell whether this handler applies: only CPAN mirror URLs do."""
    if not url:
        # Preserve the falsy value itself (None or ''), as `url and ...` did.
        return url
    return url.startswith('mirror://cpan/')
def guess_package(cp, url):
match = _cpan_package_name_re.search(url)
@ -51,7 +53,7 @@ def mangle_version(up_pv):
if len(splitted) == 2: # Split second part is sub-groups
part = splitted.pop()
for i in xrange(0, len(part), 3):
splitted.append(part[i:i+3])
splitted.append(part[i:i + 3])
if len(splitted) == 2: # add last group if it's missing
splitted.append("0")
@ -74,6 +76,7 @@ def mangle_version(up_pv):
return pv
def cpan_mangle_version(pv):
pos = pv.find('.')
if pos <= 0:
@ -82,19 +85,22 @@ def cpan_mangle_version(pv):
up_pv = up_pv[0:pos] + '.' + up_pv[pos:]
return up_pv
def cpan_vercmp(cp, a, b):
    """Compare two CPAN version strings.

    Returns a negative number when a < b, zero when equal and a positive
    number when a > b.  Purely numeric versions are compared as floats;
    anything non-numeric falls back to euscan's generic comparison.
    """
    try:
        return float(a) - float(b)
    except (TypeError, ValueError):
        # A bare `except:` here would also swallow SystemExit and
        # KeyboardInterrupt; only float-conversion failures should
        # trigger the fallback comparison.
        return helpers.simple_vercmp(a, b)
def scan_url(pkg, url, options):
cp, ver, rev = portage.pkgsplit(pkg.cpv)
remote_pkg = guess_package(cp, url)
output.einfo("Using CPAN API: %s", remote_pkg)
return scan_pkg(pkg, {'data' : remote_pkg})
return scan_pkg(pkg, {'data': remote_pkg})
def scan_pkg(pkg, options):
remote_pkg = options['data']
@ -142,7 +148,6 @@ def scan_pkg(pkg, options):
if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
continue
url = 'mirror://cpan/authors/id/%s/%s/%s/%s' % (
version['cpanid'][0],
version['cpanid'][0:1],

View File

@ -135,7 +135,8 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
versions.append((path, pv, HANDLER_NAME, confidence))
if steps:
ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url, options)
ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url,
options)
versions.extend(ret)
return versions

View File

@ -8,6 +8,7 @@ HANDLER_NAME = "kde"
def can_handle(pkg, url):
    """Only URLs on the KDE mirror are handled here."""
    if url:
        return url.startswith('mirror://kde/')
    # Preserve the falsy value itself (None or ''), as `url and ...` did.
    return url
def clean_results(results):
ret = []
@ -30,7 +31,8 @@ def scan_url(pkg, url, options):
results = generic.brute_force(pkg.cpv, url)
if generic.startswith('mirror://kde/unstable/'):
url = generic.replace('mirror://kde/unstable/', 'mirror://kde/stable/')
url = generic.replace('mirror://kde/unstable/',
'mirror://kde/stable/')
results += generic.brute_force(pkg.cpv, url)
return clean_results(results)

View File

@ -4,6 +4,7 @@ HANDLER_NAME = "pear"
CONFIDENCE = 100
PRIORITY = 90
def can_handle(pkg, url=None):
    """Handle only downloads served from the PEAR channel host."""
    if not url:
        # Preserve the falsy value itself (None or ''), as `url and ...` did.
        return url
    return url.startswith('http://%s.php.net/get/' % HANDLER_NAME)

View File

@ -28,7 +28,7 @@ def scan_url(pkg, url, options):
'http://wiki.python.org/moin/PyPiXmlRpc'
package = guess_package(pkg.cpv, url)
return scan_kg(pkg, [package])
return scan_pkg(pkg, [package])
def scan_pkg(pkg, options):
@ -52,6 +52,7 @@ def scan_pkg(pkg, options):
if helpers.version_filtered(cp, ver, pv):
continue
urls = client.release_urls(package, up_pv)
urls = " ".join([mangling.mangle_url(infos['url'], options) for infos in urls])
urls = " ".join([mangling.mangle_url(infos['url'], options)
for infos in urls])
ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
return ret

View File

@ -41,7 +41,7 @@ def scan_url(pkg, url, options):
output.einfo("Using RubyGem API: %s" % gem)
return scan_pkg(pkg, {'data' : gem})
return scan_pkg(pkg, {'data': gem})
def scan_pkg(pkg, options):

View File

@ -1,8 +1,6 @@
import re
import urllib2
import portage
import generic
from euscan import output, helpers
@ -14,9 +12,11 @@ CONFIDENCE = 100.0
is_pattern = r"\([^\/]+\)"
def can_handle(*args):
    """Always False: this handler is never selected by URL matching."""
    # Accepts (and ignores) whatever arguments the dispatcher passes.
    return False
def handle_directory_patterns(base, file_pattern):
"""
Directory pattern matching
@ -55,6 +55,7 @@ def handle_directory_patterns(base, file_pattern):
return [("/".join((basedir, path, final)), file_pattern)
for _, path in scan_data]
def read_options(options):
try:
base, file_pattern = options['data'].split(" ")[:2]
@ -75,6 +76,7 @@ def read_options(options):
return base, file_pattern
def scan_pkg(pkg, options):
output.einfo("Using watch data")
@ -95,4 +97,3 @@ def scan_pkg(pkg, options):
)
return results

View File

@ -406,6 +406,7 @@ def basedir_from_template(template):
return template[0:idx]
def generate_scan_paths(url):
prefix, chunks = url.split('://')
chunks = chunks.split('/')

View File

@ -2,6 +2,7 @@ import re
import euscan.handlers
def apply_mangling_rule(mangle, string):
# convert regex from perl format to python format
# there are some regex in this format: s/pattern/replacement/
@ -16,6 +17,7 @@ def apply_mangling_rule(mangle, string):
return re.sub(pattern, repl, string)
def apply_mangling_rules(kind, rules, string):
"""
Apply multiple mangling rules (both sed-like and handlers)
@ -43,12 +45,15 @@ def apply_mangling_rules(kind, rules, string):
return string
def mangle_version(up_pv, options):
    """Apply the 'versionmangle' rule set from *options* to *up_pv*."""
    rule_kind = 'versionmangle'
    return apply_mangling_rules(rule_kind, options, up_pv)
def mangle_url(url, options):
    """Apply the 'downloadurlmangle' rule set from *options* to *url*."""
    rule_kind = 'downloadurlmangle'
    return apply_mangling_rules(rule_kind, options, url)
# Stolen from g-pypi
def gentoo_mangle_version(up_pv):
"""Convert PV to MY_PV if needed

View File

@ -12,6 +12,7 @@ from gentoolkit import pprinter as pp
mirrors_ = None
class ProgressHandler(object):
def __init__(self, progress_bar):
self.curval = 0
@ -97,6 +98,7 @@ def to_ebuild_uri(cpv, url):
url = url.replace(src, '${%s}' % dst)
return url
def load_mirrors():
import random
@ -106,6 +108,7 @@ def load_mirrors():
for mirror_name in mirrors_:
random.shuffle(mirrors_[mirror_name])
def from_mirror(url):
if not url.startswith('mirror://'):
return url
@ -120,12 +123,13 @@ def from_mirror(url):
return url
def to_mirror(url):
if not mirrors_:
load_mirrors()
for mirror_name in mirrors_:
for mirror_url in mirrors[mirror_name]:
for mirror_url in mirrors_[mirror_name]:
if url.startswith(mirror_url):
url_part = url.split(mirror_url)[1]
return "mirror://%s%s%s" % (

View File

@ -5,7 +5,6 @@ import sys
from datetime import datetime
import portage
from portage.dbapi import porttree
import gentoolkit.pprinter as pp
from gentoolkit.query import Query
@ -44,6 +43,7 @@ def filter_versions(cp, versions):
for version in filtered
]
def parse_src_uri(uris):
ret = {}
@ -59,17 +59,18 @@ def parse_src_uri(uris):
if uris and uris[-1] == "->":
operator = uris.pop()
file = uris.pop()
file_ = uris.pop()
else:
file = os.path.basename(uri)
file_ = os.path.basename(uri)
if file not in ret:
ret[file] = []
if file_ not in ret:
ret[file_] = []
ret[file].append(uri)
ret[file_].append(uri)
return ret
def reload_gentoolkit():
import gentoolkit
@ -86,6 +87,7 @@ def reload_gentoolkit():
if hasattr(gentoolkit.query, 'PORTDB'):
gentoolkit.query.PORTDB = PORTDB
def scan_upstream(query, on_progress=None):
"""
Scans the upstream searching new versions for the given query
@ -152,7 +154,9 @@ def scan_upstream(query, on_progress=None):
"ebuild", pp.path(os.path.normpath(ebuild_path))
)
uris, homepage, description = pkg.environment(('SRC_URI', 'HOMEPAGE', 'DESCRIPTION'))
uris, homepage, description = pkg.environment(
('SRC_URI', 'HOMEPAGE', 'DESCRIPTION')
)
output.metadata("repository", pkg.repo_name())
output.metadata("homepage", homepage)

View File

@ -1,11 +1,14 @@
import re
def is_version_type_stable(version_type):
    """A version type is stable unless it is a pre-release marker."""
    unstable_markers = ("alpha", "beta", "pre", "rc")
    return version_type not in unstable_markers
def is_version_stable(version):
    """Classify *version* and test the resulting type for stability."""
    version_type = get_version_type(version)
    return is_version_type_stable(version_type)
def get_version_type(version):
types = []
gentoo_types = ("alpha", "beta", "pre", "rc", "p")
@ -20,20 +23,23 @@ def get_version_type(version):
# Stolen from pkg_resources, but importing it is not a good idea
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
replace = \
{'pre': 'c', 'preview': 'c', '-': 'final-', 'rc': 'c', 'dev': '@'}.get
def _parse_version_parts(s):
for part in component_re.split(s):
part = replace(part,part)
if not part or part=='.':
part = replace(part, part)
if not part or part == '.':
continue
if part[:1] in '0123456789':
yield part.zfill(8) # pad for numeric comparison
else:
yield '*'+part
yield '*' + part
yield '*final' # ensure that alpha/beta/candidate are before final
def parse_version(s):
"""Convert a version string to a chronologically-sortable key
@ -68,10 +74,11 @@ def parse_version(s):
parts = []
for part in _parse_version_parts(s.lower()):
if part.startswith('*'):
if part<'*final': # remove '-' before a prerelease tag
while parts and parts[-1]=='*final-': parts.pop()
if part < '*final': # remove '-' before a prerelease tag
while parts and parts[-1] == '*final-':
parts.pop()
# remove trailing zeros from each series of numeric parts
while parts and parts[-1]=='00000000':
while parts and parts[-1] == '00000000':
parts.pop()
parts.append(part)
return tuple(parts)