euscan/euscanwww: Removing unused imports, fixed some typos and PEP8 cleaning

Signed-off-by: volpino <fox91@anche.no>

parent b03541c97d
commit 952d9505b2
@@ -299,9 +299,9 @@ def main():
     except Exception as err:
         import traceback
-        print ('-'*60)
+        print ('-' * 60)
         traceback.print_exc(file=sys.stderr)
-        print ('-'*60)
+        print ('-' * 60)

         output.eerror('%s: %s' % (query, str(err)))
         exit_helper(1)

@@ -178,7 +178,8 @@ def patch_metadata(package, watch_data, diff=False):
     for opt in opts.split(","):
         opt_name, opt_value = opt.split("=", 1)
         if opt_name in valid:
-            if opt_name == "uversionmangle": opt_name = "versionmangle"
+            if opt_name == "uversionmangle":
+                opt_name = "versionmangle"
             cleaned_opts.append('%s="%s"' % (opt_name, opt_value))
     opts = " ".join(cleaned_opts)

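The loop above normalizes Debian watch-file options, keeping only keys euscan understands and renaming uversionmangle to versionmangle. A standalone sketch of that normalization (the valid set here is hypothetical and the order of checks simplified):

    valid = ("versionmangle", "downloadurlmangle")

    def clean_watch_opts(opts):
        cleaned_opts = []
        for opt in opts.split(","):
            opt_name, opt_value = opt.split("=", 1)
            if opt_name == "uversionmangle":  # Debian name -> euscan name
                opt_name = "versionmangle"
            if opt_name in valid:
                cleaned_opts.append('%s="%s"' % (opt_name, opt_value))
        return " ".join(cleaned_opts)

    print(clean_watch_opts('uversionmangle=s/rc/_rc/,foo=bar'))
    # versionmangle="s/rc/_rc/"
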
@@ -43,8 +43,8 @@ class ScanMetadata(object):
         pkg = self.get_package(query)

         try:
-            version = Version.objects.filter(package=pkg).values('metadata_path')\
-                .order_by('version', 'revision')[0]
+            version = Version.objects.filter(package=pkg).\
+                values('metadata_path').order_by('version', 'revision')[0]
         except IndexError:
             return pkg, None

@@ -35,9 +35,11 @@ def group_one(task, seq, *args, **kwargs):
     for elem in seq:
         if attr_name:
             kwargs[attr_name] = elem
-            tasks.append(task.subtask(args=list(args), kwargs=dict(kwargs), immutable=True))
+            tasks.append(task.subtask(args=list(args), kwargs=dict(kwargs),
+                                      immutable=True))
         else:
-            tasks.append(task.subtask(args=[elem] + list(args), kwargs=dict(kwargs), immutable=True))
+            tasks.append(task.subtask(args=[elem] + list(args),
+                                      kwargs=dict(kwargs), immutable=True))

     return group(tasks)

@@ -49,7 +51,8 @@ def group_chunks(task, seq, n, *args, **kwargs):
     tasks = []
     for i in xrange(0, len(seq), n):
         tasks.append(
-            task.subtask(args=[seq[i:i + n]] + list(args), kwargs=kwargs, immutable=True)
+            task.subtask(args=[seq[i:i + n]] + list(args), kwargs=kwargs,
+                         immutable=True)
         )
     return group(tasks)

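For context, group_chunks above batches seq into n-sized slices, one immutable celery subtask per slice. The slicing idiom on its own (an illustrative sketch, independent of celery):

    def chunks(seq, n):
        # yield successive n-sized slices, like the
        # xrange(0, len(seq), n) loop in group_chunks above
        for i in range(0, len(seq), n):
            yield seq[i:i + n]

    print(list(chunks([1, 2, 3, 4, 5], 2)))  # [[1, 2], [3, 4], [5]]
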
@@ -169,7 +172,8 @@ def update_portage(packages=None):

    """ Workaround for celery bug when chaining groups """
    update_portage_trees()
-    scan_portage(packages=[], purge_packages=True, purge_versions=True, prefetch=True)
+    scan_portage(packages=[], purge_packages=True, purge_versions=True,
+                 prefetch=True)
    scan_metadata(packages=[], populate=True)
    update_counters(fast=False)

@@ -250,7 +254,9 @@ def consume_refresh_queue(locked=False):
     if RefreshPackageQuery.objects.count():
         logger.info('Restarting myself in 60s')
         lock()
-        consume_refresh_queue.apply_async(kwargs={'locked':True}, countdown=60)
+        consume_refresh_queue.apply_async(
+            kwargs={'locked': True}, countdown=60
+        )

 admin_tasks = [
     regen_rrds,

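The apply_async(countdown=60) call above is celery's way of re-enqueuing the same task with a delay. A minimal sketch of that self-rescheduling pattern (old-style task decorator matching the celery 2.x/3.x era of this codebase; illustrative only):

    from celery.task import task

    @task
    def consume_queue(locked=False):
        # ... drain the queue here ...
        # then re-run this task in 60 seconds, keeping the lock held
        consume_queue.apply_async(kwargs={'locked': True}, countdown=60)
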
@@ -399,9 +399,14 @@ def accounts_index(request):
     maintainers = sorted(get_account_maintainers(request.user),
                          key=upstream_k, reverse=True)
     m_upstream = sum([upstream_k(c) for c in maintainers])
-    packages = sorted(get_profile(user).packages.all(),
-        key=lambda p: p.n_versions - p.n_packaged - p.n_overlay, reverse=True)
-    p_upstream = sum([c.n_versions - c.n_packaged - c.n_overlay for c in packages])
+    packages = sorted(
+        get_profile(user).packages.all(),
+        key=lambda p: p.n_versions - p.n_packaged - p.n_overlay,
+        reverse=True
+    )
+    p_upstream = sum(
+        [c.n_versions - c.n_packaged - c.n_overlay for c in packages]
+    )
     return {
         "categories": categories, "categories_upstream": c_upstream,
         "herds": herds, "herds_upstream": h_upstream,

@@ -1,12 +1,12 @@
-import os, sys
+import os
+import sys
 import pkgutil

 from euscan import CONFIG, output
-import euscan.mangling

 from gentoolkit.metadata import MetaData

-handlers = {'package' : [], 'url' : [], 'all' : {}}
+handlers = {'package': [], 'url': [], 'all': {}}

 # autoimport all modules in this directory and append them to handlers list
 for loader, module_name, is_pkg in pkgutil.walk_packages(__path__):

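The walk_packages loop above is the stock Python 2 idiom for auto-importing every module that lives next to a package's __init__.py. Roughly (a sketch, with a hypothetical package name):

    import pkgutil
    import myhandlers  # hypothetical package whose modules we want to load

    for loader, module_name, is_pkg in pkgutil.walk_packages(myhandlers.__path__):
        # PEP 302 loader protocol, as used on Python 2
        module = loader.find_module(module_name).load_module(module_name)
        print(module.__name__)
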
@@ -19,6 +19,7 @@ for loader, module_name, is_pkg in pkgutil.walk_packages(__path__):
         handlers['package'].append(module)
     handlers['all'][module.HANDLER_NAME] = module

+
 # sort handlers by priority
 def sort_handlers(handlers):
     return sorted(

@@ -30,6 +31,7 @@ def sort_handlers(handlers):
 handlers['package'] = sort_handlers(handlers['package'])
 handlers['url'] = sort_handlers(handlers['url'])

+
 def find_best_handler(kind, pkg, *args):
     """
     Find the best handler for the given package

@@ -39,6 +41,7 @@ def find_best_handler(kind, pkg, *args):
             return handler
     return None

+
 def find_handlers(kind, names):
     ret = []

@@ -49,12 +52,14 @@ def find_handlers(kind, names):

     return ret

+
 def get_metadata(pkg):
     metadata = {}

     pkg_metadata = None

-    meta_override = os.path.join('metadata', pkg.category, pkg.name, 'metadata.xml')
+    meta_override = os.path.join('metadata', pkg.category, pkg.name,
+                                 'metadata.xml')

     try:
         if os.path.exists(meta_override):

@@ -99,10 +104,11 @@ def get_metadata(pkg):
                 if not metadata[handler][i]['data']:
                     metadata[handler][i]['data'] = node.text
             else:
-                metadata[handler] = [{'type' : handler, 'data' : node.text }]
+                metadata[handler] = [{'type': handler, 'data': node.text}]

     return metadata

+
 def scan_pkg(pkg_handler, pkg, options, on_progress=None):
     versions = []

@@ -117,6 +123,7 @@ def scan_pkg(pkg_handler, pkg, options, on_progress=None):

     return versions

+
 def scan_url(pkg, urls, options, on_progress=None):
     versions = []

@@ -158,6 +165,7 @@ def scan_url(pkg, urls, options, on_progress=None):

     return versions

+
 def scan(pkg, urls, on_progress=None):
     """
     Scans upstream for the given package.

@@ -174,7 +182,8 @@ def scan(pkg, urls, on_progress=None):
     pkg_handlers = find_handlers('package', metadata.keys())
     if not pkg_handlers:
         pkg_handler = find_best_handler('package', pkg)
-        if pkg_handler: pkg_handlers = [pkg_handler]
+        if pkg_handler:
+            pkg_handlers = [pkg_handler]

     for pkg_handler in pkg_handlers:
         options = metadata.get(pkg_handler.HANDLER_NAME, [{}])

@@ -185,6 +194,7 @@ def scan(pkg, urls, on_progress=None):

     return versions

+
 def mangle(kind, name, string):
     if name not in handlers['all']:
         return None

@@ -193,8 +203,10 @@ def mangle(kind, name, string):
         return None
     return getattr(handler, 'mangle_%s' % kind)(string)

+
 def mangle_url(name, string):
     return mangle('url', name, string)

+
 def mangle_version(name, string):
     return mangle('version', name, string)

@@ -11,9 +11,11 @@ PRIORITY = 90

 _cpan_package_name_re = re.compile("mirror://cpan/authors/.*/([^/.]*).*")

+
 def can_handle(pkg, url=None):
     return url and url.startswith('mirror://cpan/')

+
 def guess_package(cp, url):
     match = _cpan_package_name_re.search(url)

@@ -51,7 +53,7 @@ def mangle_version(up_pv):
     if len(splitted) == 2: # Split second part is sub-groups
         part = splitted.pop()
         for i in xrange(0, len(part), 3):
-            splitted.append(part[i:i+3])
+            splitted.append(part[i:i + 3])

     if len(splitted) == 2: # add last group if it's missing
         splitted.append("0")

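For context, this loop re-groups the decimal part of a CPAN float-style version into three-digit chunks. The slicing step in isolation (illustrative values):

    splitted = ['1', '23456']
    part = splitted.pop()
    for i in range(0, len(part), 3):
        splitted.append(part[i:i + 3])
    print(splitted)  # ['1', '234', '56']
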
@@ -74,6 +76,7 @@ def mangle_version(up_pv):

     return pv

+
 def cpan_mangle_version(pv):
     pos = pv.find('.')
     if pos <= 0:

@@ -82,19 +85,22 @@ def cpan_mangle_version(pv):
     up_pv = up_pv[0:pos] + '.' + up_pv[pos:]
     return up_pv

+
 def cpan_vercmp(cp, a, b):
     try:
         return float(a) - float(b)
     except:
         return helpers.simple_vercmp(a, b)

+
 def scan_url(pkg, url, options):
     cp, ver, rev = portage.pkgsplit(pkg.cpv)
     remote_pkg = guess_package(cp, url)

     output.einfo("Using CPAN API: %s", remote_pkg)

-    return scan_pkg(pkg, {'data' : remote_pkg})
+    return scan_pkg(pkg, {'data': remote_pkg})

+
 def scan_pkg(pkg, options):
     remote_pkg = options['data']

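cpan_vercmp above prefers numeric comparison because CPAN versions are decimal fractions, where string ordering misleads. A simplified standalone sketch (the fallback is a stand-in for helpers.simple_vercmp):

    def cpan_vercmp(a, b):
        # negative: a < b; positive: a > b
        try:
            return float(a) - float(b)
        except ValueError:
            return (a > b) - (a < b)  # stand-in for helpers.simple_vercmp

    print(cpan_vercmp('1.9', '1.10'))  # positive: 1.9 is the newer CPAN version
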
@@ -142,7 +148,6 @@ def scan_pkg(pkg, options):
         if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
             continue

-
         url = 'mirror://cpan/authors/id/%s/%s/%s/%s' % (
             version['cpanid'][0],
             version['cpanid'][0:1],

@@ -135,7 +135,8 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
         versions.append((path, pv, HANDLER_NAME, confidence))

         if steps:
-            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url, options)
+            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url,
+                                           options)
             versions.extend(ret)

     return versions

@@ -8,6 +8,7 @@ HANDLER_NAME = "kde"
 def can_handle(pkg, url):
     return url and url.startswith('mirror://kde/')

+
 def clean_results(results):
     ret = []

@@ -30,7 +31,8 @@ def scan_url(pkg, url, options):
     results = generic.brute_force(pkg.cpv, url)

     if generic.startswith('mirror://kde/unstable/'):
-        url = generic.replace('mirror://kde/unstable/', 'mirror://kde/stable/')
+        url = generic.replace('mirror://kde/unstable/',
+                              'mirror://kde/stable/')
         results += generic.brute_force(pkg.cpv, url)

     return clean_results(results)

@@ -4,6 +4,7 @@ HANDLER_NAME = "pear"
 CONFIDENCE = 100
 PRIORITY = 90

+
 def can_handle(pkg, url=None):
     return url and url.startswith('http://%s.php.net/get/' % HANDLER_NAME)

@@ -28,7 +28,7 @@ def scan_url(pkg, url, options):
     'http://wiki.python.org/moin/PyPiXmlRpc'

     package = guess_package(pkg.cpv, url)
-    return scan_kg(pkg, [package])
+    return scan_pkg(pkg, [package])


 def scan_pkg(pkg, options):

@@ -52,6 +52,7 @@ def scan_pkg(pkg, options):
         if helpers.version_filtered(cp, ver, pv):
             continue
         urls = client.release_urls(package, up_pv)
-        urls = " ".join([mangling.mangle_url(infos['url'], options) for infos in urls])
+        urls = " ".join([mangling.mangle_url(infos['url'], options)
+                         for infos in urls])
         ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
     return ret

@@ -41,7 +41,7 @@ def scan_url(pkg, url, options):

     output.einfo("Using RubyGem API: %s" % gem)

-    return scan_pkg(pkg, {'data' : gem})
+    return scan_pkg(pkg, {'data': gem})


 def scan_pkg(pkg, options):

@@ -1,8 +1,6 @@
 import re
 import urllib2

-import portage
-
 import generic
 from euscan import output, helpers

@@ -14,9 +12,11 @@ CONFIDENCE = 100.0

 is_pattern = r"\([^\/]+\)"

+
 def can_handle(*args):
     return False

+
 def handle_directory_patterns(base, file_pattern):
     """
     Directory pattern matching

@@ -55,6 +55,7 @@ def handle_directory_patterns(base, file_pattern):
     return [("/".join((basedir, path, final)), file_pattern)
             for _, path in scan_data]

+
 def read_options(options):
     try:
         base, file_pattern = options['data'].split(" ")[:2]

@@ -75,6 +76,7 @@ def read_options(options):

     return base, file_pattern

+
 def scan_pkg(pkg, options):
     output.einfo("Using watch data")

@@ -95,4 +97,3 @@ def scan_pkg(pkg, options):
     )
-
     return results

@@ -406,6 +406,7 @@ def basedir_from_template(template):

     return template[0:idx]

+
 def generate_scan_paths(url):
     prefix, chunks = url.split('://')
     chunks = chunks.split('/')

@@ -2,6 +2,7 @@ import re

 import euscan.handlers

+
 def apply_mangling_rule(mangle, string):
     # convert regex from perl format to python format
     # there are some regex in this format: s/pattern/replacement/

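As the comments above note, watch-file mangling rules arrive in Perl's s/pattern/replacement/ form and must be translated for re.sub. A simplified sketch of that conversion (not the function from this file; it ignores escaped slashes and flags, and only rewrites $1-style back-references):

    import re

    def apply_sed_rule(rule, string):
        # "s/pattern/replacement/" -> ('s', pattern, replacement, '')
        _, pattern, repl, _ = rule.split('/')
        repl = re.sub(r'\$(\d+)', r'\\\1', repl)  # $1 -> \1
        return re.sub(pattern, repl, string)

    print(apply_sed_rule('s/_/./', '1_2'))  # 1.2
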
@@ -16,6 +17,7 @@ def apply_mangling_rule(mangle, string):

     return re.sub(pattern, repl, string)

+
 def apply_mangling_rules(kind, rules, string):
     """
     Apply multiple mangling rules (both sed-like and handlers)

@@ -43,12 +45,15 @@ def apply_mangling_rules(kind, rules, string):

     return string

+
 def mangle_version(up_pv, options):
     return apply_mangling_rules('versionmangle', options, up_pv)

+
 def mangle_url(url, options):
     return apply_mangling_rules('downloadurlmangle', options, url)

+
 # Stolen from g-pypi
 def gentoo_mangle_version(up_pv):
     """Convert PV to MY_PV if needed

@@ -12,6 +12,7 @@ from gentoolkit import pprinter as pp

 mirrors_ = None

+
 class ProgressHandler(object):
     def __init__(self, progress_bar):
         self.curval = 0

@@ -97,6 +98,7 @@ def to_ebuild_uri(cpv, url):
         url = url.replace(src, '${%s}' % dst)
     return url

+
 def load_mirrors():
     import random

@@ -106,6 +108,7 @@ def load_mirrors():
     for mirror_name in mirrors_:
         random.shuffle(mirrors_[mirror_name])

+
 def from_mirror(url):
     if not url.startswith('mirror://'):
         return url

@@ -120,12 +123,13 @@ def from_mirror(url):

     return url

+
 def to_mirror(url):
     if not mirrors_:
         load_mirrors()

     for mirror_name in mirrors_:
-        for mirror_url in mirrors[mirror_name]:
+        for mirror_url in mirrors_[mirror_name]:
             if url.startswith(mirror_url):
                 url_part = url.split(mirror_url)[1]
                 return "mirror://%s%s%s" % (

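Beyond style, the mirrors to mirrors_ change above is a real fix: the module-level cache is named mirrors_, so the old line raised NameError as soon as a URL reached the inner loop. A tiny illustration of the repaired lookup (data and return format hypothetical and simplified):

    mirrors_ = {'gentoo': ['http://distfiles.gentoo.org/']}

    def to_mirror(url):
        for name in mirrors_:
            for mirror_url in mirrors_[name]:  # 'mirrors[...]' raised NameError
                if url.startswith(mirror_url):
                    return 'mirror://%s/%s' % (name, url[len(mirror_url):])
        return url

    print(to_mirror('http://distfiles.gentoo.org/foo.tar.gz'))
    # mirror://gentoo/foo.tar.gz
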
@@ -5,7 +5,6 @@ import sys
 from datetime import datetime

 import portage
-from portage.dbapi import porttree

 import gentoolkit.pprinter as pp
 from gentoolkit.query import Query

@@ -44,6 +43,7 @@ def filter_versions(cp, versions):
         for version in filtered
     ]

+
 def parse_src_uri(uris):
     ret = {}

@@ -59,17 +59,18 @@ def parse_src_uri(uris):

         if uris and uris[-1] == "->":
             operator = uris.pop()
-            file = uris.pop()
+            file_ = uris.pop()
         else:
-            file = os.path.basename(uri)
+            file_ = os.path.basename(uri)

-        if file not in ret:
-            ret[file] = []
+        if file_ not in ret:
+            ret[file_] = []

-        ret[file].append(uri)
+        ret[file_].append(uri)

     return ret

+
 def reload_gentoolkit():
     import gentoolkit

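Two things happen above: file is renamed to file_ so the loop no longer shadows Python 2's built-in file type (the PEP 8 trailing-underscore convention), and each local file name is mapped to the SRC_URI entries that produce it, honoring the -> rename operator. A rough standalone sketch of that mapping (simplified to a forward scan):

    import os

    def parse_src_uri(uris):
        # uris is a whitespace-split SRC_URI value
        ret = {}
        uris = list(uris)
        while uris:
            uri = uris.pop(0)
            if uris and uris[0] == '->':
                uris.pop(0)          # drop the rename operator
                file_ = uris.pop(0)  # local name the fetch is saved under
            else:
                file_ = os.path.basename(uri)
            ret.setdefault(file_, []).append(uri)
        return ret

    print(parse_src_uri(['http://example.com/foo-1.0.tgz', '->', 'foo.tgz']))
    # {'foo.tgz': ['http://example.com/foo-1.0.tgz']}
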
@@ -86,6 +87,7 @@ def reload_gentoolkit():
     if hasattr(gentoolkit.query, 'PORTDB'):
         gentoolkit.query.PORTDB = PORTDB

+
 def scan_upstream(query, on_progress=None):
     """
     Scans the upstream searching new versions for the given query

@@ -152,7 +154,9 @@ def scan_upstream(query, on_progress=None):
         "ebuild", pp.path(os.path.normpath(ebuild_path))
     )

-    uris, homepage, description = pkg.environment(('SRC_URI', 'HOMEPAGE', 'DESCRIPTION'))
+    uris, homepage, description = pkg.environment(
+        ('SRC_URI', 'HOMEPAGE', 'DESCRIPTION')
+    )

     output.metadata("repository", pkg.repo_name())
     output.metadata("homepage", homepage)

@@ -1,11 +1,14 @@
 import re

+
 def is_version_type_stable(version_type):
     return version_type not in ("alpha", "beta", "pre", "rc")

+
 def is_version_stable(version):
     return is_version_type_stable(get_version_type(version))

+
 def get_version_type(version):
     types = []
     gentoo_types = ("alpha", "beta", "pre", "rc", "p")

@@ -20,20 +23,23 @@ def get_version_type(version):
 # Stolen from pkg_resources, but importing it is not a good idea

 component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
-replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
+replace = \
+    {'pre': 'c', 'preview': 'c', '-': 'final-', 'rc': 'c', 'dev': '@'}.get

+
 def _parse_version_parts(s):
     for part in component_re.split(s):
-        part = replace(part,part)
-        if not part or part=='.':
+        part = replace(part, part)
+        if not part or part == '.':
             continue
         if part[:1] in '0123456789':
             yield part.zfill(8) # pad for numeric comparison
         else:
-            yield '*'+part
+            yield '*' + part

     yield '*final' # ensure that alpha/beta/candidate are before final

+
 def parse_version(s):
     """Convert a version string to a chronologically-sortable key

@@ -68,10 +74,11 @@ def parse_version(s):
     parts = []
     for part in _parse_version_parts(s.lower()):
         if part.startswith('*'):
-            if part<'*final': # remove '-' before a prerelease tag
-                while parts and parts[-1]=='*final-': parts.pop()
+            if part < '*final':  # remove '-' before a prerelease tag
+                while parts and parts[-1] == '*final-':
+                    parts.pop()
             # remove trailing zeros from each series of numeric parts
-            while parts and parts[-1]=='00000000':
+            while parts and parts[-1] == '00000000':
                 parts.pop()
         parts.append(part)
     return tuple(parts)

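Together, _parse_version_parts and parse_version turn version strings into tuples that sort chronologically, with prereleases ordering before finals. A quick usage sketch (assuming the two functions above are in scope):

    versions = ['1.0rc1', '1.0', '1.0.1', '1.0b2']
    print(sorted(versions, key=parse_version))
    # ['1.0b2', '1.0rc1', '1.0', '1.0.1']
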