Run black on project
Signed-off-by: Alfred Wingate <parona@protonmail.com>
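
Black's changes here are mechanical: it normalizes string quoting to double quotes, collapses wrapped expressions that fit within its 88-column limit, splits those that do not, and fixes blank-line spacing around definitions; runtime behavior is unchanged. A representative before/after pair, taken from the first hunk below:

    # before running black
    handlers = {'package': [], 'url': [], 'all': {}}
    # after running black
    handlers = {"package": [], "url": [], "all": {}}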
@@ -6,31 +6,27 @@ from euscan import CONFIG, output
 
 from portage.xml.metadata import MetaDataXML
 
-handlers = {'package': [], 'url': [], 'all': {}}
+handlers = {"package": [], "url": [], "all": {}}
 
 # autoimport all modules in this directory and append them to handlers list
 for loader, module_name, is_pkg in pkgutil.walk_packages(__path__):
     module = loader.find_spec(module_name).loader.load_module(module_name)
-    if not hasattr(module, 'HANDLER_NAME'):
+    if not hasattr(module, "HANDLER_NAME"):
         continue
-    if hasattr(module, 'scan_url'):
-        handlers['url'].append(module)
-    if hasattr(module, 'scan_pkg'):
-        handlers['package'].append(module)
-    handlers['all'][module.HANDLER_NAME] = module
+    if hasattr(module, "scan_url"):
+        handlers["url"].append(module)
+    if hasattr(module, "scan_pkg"):
+        handlers["package"].append(module)
+    handlers["all"][module.HANDLER_NAME] = module
 
 
 # sort handlers by priority
 def sort_handlers(handlers):
-    return sorted(
-        handlers,
-        key=lambda handler: handler.PRIORITY,
-        reverse=True
-    )
+    return sorted(handlers, key=lambda handler: handler.PRIORITY, reverse=True)
 
 
-handlers['package'] = sort_handlers(handlers['package'])
-handlers['url'] = sort_handlers(handlers['url'])
+handlers["package"] = sort_handlers(handlers["package"])
+handlers["url"] = sort_handlers(handlers["url"])
 
 
 def find_best_handler(kind, pkg, *args):
@@ -38,8 +34,9 @@ def find_best_handler(kind, pkg, *args):
     Find the best handler for the given package
     """
     for handler in handlers[kind]:
-        if (handler.HANDLER_NAME not in CONFIG["handlers-exclude"] and
-                handler.can_handle(pkg, *args)):
+        if handler.HANDLER_NAME not in CONFIG[
+            "handlers-exclude"
+        ] and handler.can_handle(pkg, *args):
             return handler
     return None
 
@@ -49,8 +46,8 @@ def find_handlers(kind, names):
 
     for name in names:
         # Does this handler exist, and handle this kind of thing ? (pkg / url)
-        if name in handlers['all'] and handlers['all'][name] in handlers[kind]:
-            ret.append(handlers['all'][name])
+        if name in handlers["all"] and handlers["all"][name] in handlers[kind]:
+            ret.append(handlers["all"][name])
 
     return ret
 
@@ -60,17 +57,16 @@ def get_metadata(pkg):
 
     pkg_metadata = None
 
-    meta_override = os.path.join('metadata', pkg.category, pkg.name,
-                                 'metadata.xml')
+    meta_override = os.path.join("metadata", pkg.category, pkg.name, "metadata.xml")
 
     try:
         if os.path.exists(meta_override):
             pkg_metadata = MetaDataXML(meta_override)
-            output.einfo('Using custom metadata: %s' % meta_override)
+            output.einfo("Using custom metadata: %s" % meta_override)
         if not pkg_metadata:
             pkg_metadata = pkg.metadata
     except Exception as e:
-        output.ewarn('Error when fetching metadata: %s' % str(e))
+        output.ewarn("Error when fetching metadata: %s" % str(e))
 
     if not pkg_metadata:
         return {}
@@ -79,13 +75,13 @@ def get_metadata(pkg):
     for upstream in pkg_metadata._xml_tree.findall("upstream"):
         for node in upstream.findall("watch"):
             options = dict(node.attrib)
-            options['data'] = node.text
+            options["data"] = node.text
 
             if "type" in options:
-                handler = options['type']
+                handler = options["type"]
             else:
                 handler = "url"
-                options['type'] = "url"
+                options["type"] = "url"
 
             for key in ["versionmangle", "downloadurlmangle"]:
                 value = options.get(key, None)
@@ -103,10 +99,10 @@ def get_metadata(pkg):
                 continue
             if handler in metadata:
                 for i in range(len(metadata[handler])):
-                    if not metadata[handler][i]['data']:
-                        metadata[handler][i]['data'] = node.text
+                    if not metadata[handler][i]["data"]:
+                        metadata[handler][i]["data"] = node.text
             else:
-                metadata[handler] = [{'type': handler, 'data': node.text}]
+                metadata[handler] = [{"type": handler, "data": node.text}]
 
     return metadata
 
@@ -145,24 +141,21 @@ def scan_url(pkg, urls, options, on_progress=None):
 
         output.einfo("SRC_URI is '%s'" % url)
 
-        if '://' not in url:
+        if "://" not in url:
             output.einfo("Invalid url '%s'" % url)
             continue
 
         try:
-            url_handler = find_best_handler('url', pkg, url)
+            url_handler = find_best_handler("url", pkg, url)
             if url_handler:
                 for o in options:
                     versions += url_handler.scan_url(pkg, url, o)
             else:
                 output.eerror("Can't find a suitable handler!")
         except Exception as e:
-            output.ewarn(
-                "Handler failed: [%s] %s" %
-                (e.__class__.__name__, str(e))
-            )
+            output.ewarn("Handler failed: [%s] %s" % (e.__class__.__name__, str(e)))
 
-        if versions and CONFIG['oneshot']:
+        if versions and CONFIG["oneshot"]:
            break
 
     if on_progress and progress_available > 0:
@@ -178,15 +171,15 @@ def scan(pkg, urls, on_progress=None):
     in url handling.
     """
 
-    if not CONFIG['quiet'] and not CONFIG['format']:
-        sys.stdout.write('\n')
+    if not CONFIG["quiet"] and not CONFIG["format"]:
+        sys.stdout.write("\n")
 
     metadata = get_metadata(pkg)
     versions = []
 
-    pkg_handlers = find_handlers('package', list(metadata.keys()))
+    pkg_handlers = find_handlers("package", list(metadata.keys()))
     if not pkg_handlers:
-        pkg_handler = find_best_handler('package', pkg)
+        pkg_handler = find_best_handler("package", pkg)
         if pkg_handler:
             pkg_handlers = [pkg_handler]
 
@@ -201,17 +194,17 @@ def scan(pkg, urls, on_progress=None):
 
 
 def mangle(kind, name, string):
-    if name not in handlers['all']:
+    if name not in handlers["all"]:
         return None
-    handler = handlers['all'][name]
-    if not hasattr(handler, 'mangle_%s' % kind):
+    handler = handlers["all"][name]
+    if not hasattr(handler, "mangle_%s" % kind):
         return None
-    return getattr(handler, 'mangle_%s' % kind)(string)
+    return getattr(handler, "mangle_%s" % kind)(string)
 
 
 def mangle_url(name, string):
-    return mangle('url', name, string)
+    return mangle("url", name, string)
 
 
 def mangle_version(name, string):
-    return mangle('version', name, string)
+    return mangle("version", name, string)
@@ -36,19 +36,13 @@ def scan_url(pkg, url, options):
     project_page = "http://developer.berlios.de/projects/%s" % project
     content = urllib.request.urlopen(project_page).read()
 
-    project_id = re.search(
-        r"/project/filelist.php\?group_id=(\d+)",
-        content
-    ).group(1)
+    project_id = re.search(r"/project/filelist.php\?group_id=(\d+)", content).group(1)
 
     base_url = (
-        "http://developer.berlios.de/project/filelist.php?group_id=%s" %
-        project_id
+        "http://developer.berlios.de/project/filelist.php?group_id=%s" % project_id
     )
 
-    file_pattern = regex_from_template(
-        filename.replace(ver, "${PV}")
-    )
+    file_pattern = regex_from_template(filename.replace(ver, "${PV}"))
 
     result = url_scan(pkg, base_url, file_pattern)
 
@@ -13,7 +13,7 @@ _cpan_package_name_re = re.compile("mirror://cpan/authors/.*/([^/.]*).*")
 
 
 def can_handle(pkg, url=None):
-    return url and url.startswith('mirror://cpan/')
+    return url and url.startswith("mirror://cpan/")
 
 
 def guess_package(cp, url):
@@ -24,7 +24,7 @@ def guess_package(cp, url):
     if match:
         pkg = match.group(1)
         try:
-            cp, ver, rev = portage.pkgsplit('fake/' + pkg)
+            cp, ver, rev = portage.pkgsplit("fake/" + pkg)
         except:
             pass
 
@@ -34,7 +34,7 @@ def guess_package(cp, url):
 
 
 def mangle_version(up_pv):
-    if up_pv.startswith('v'):
+    if up_pv.startswith("v"):
         return up_pv[1:]
 
     # clean
@@ -53,14 +53,14 @@ def mangle_version(up_pv):
     if len(splitted) == 2:  # Split second part is sub-groups
         part = splitted.pop()
         for i in range(0, len(part), 3):
-            splitted.append(part[i:i + 3])
+            splitted.append(part[i : i + 3])
 
     if len(splitted) == 2:  # add last group if it's missing
         splitted.append("0")
 
     groups = [splitted[0]]
     for part in splitted[1:-1]:
-        groups.append(part.ljust(3, '0'))
+        groups.append(part.ljust(3, "0"))
     if splitted[-1] == "0":
         groups.append(splitted[-1])
     else:
@@ -78,11 +78,11 @@ def mangle_version(up_pv):
 
 
 def cpan_mangle_version(pv):
-    pos = pv.find('.')
+    pos = pv.find(".")
     if pos <= 0:
         return pv
-    up_pv = pv.replace('.', '')
-    up_pv = up_pv[0:pos] + '.' + up_pv[pos:]
+    up_pv = pv.replace(".", "")
+    up_pv = up_pv[0:pos] + "." + up_pv[pos:]
     return up_pv
 
 
@@ -99,17 +99,17 @@ def scan_url(pkg, url, options):
 
     output.einfo("Using CPAN API: %s", remote_pkg)
 
-    return scan_pkg(pkg, {'data': remote_pkg})
+    return scan_pkg(pkg, {"data": remote_pkg})
 
 
 def scan_pkg(pkg, options):
-    remote_pkg = options['data']
+    remote_pkg = options["data"]
 
     # Defaults to CPAN mangling rules
-    if 'versionmangle' not in options:
-        options['versionmangle'] = ['cpan', 'gentoo']
+    if "versionmangle" not in options:
+        options["versionmangle"] = ["cpan", "gentoo"]
 
-    url = 'http://search.cpan.org/api/dist/%s' % remote_pkg
+    url = "http://search.cpan.org/api/dist/%s" % remote_pkg
     cp, ver, rev = pkg.cp, pkg.version, pkg.revision
     m_ver = cpan_mangle_version(ver)
 
@@ -128,19 +128,19 @@ def scan_pkg(pkg, options):
     data = fp.read()
     data = json.loads(data)
 
-    if 'releases' not in data:
+    if "releases" not in data:
         return []
 
     ret = []
 
-    for version in data['releases']:
-        #if version['status'] == 'testing':
+    for version in data["releases"]:
+        # if version['status'] == 'testing':
         #    continue
 
-        up_pv = version['version']
+        up_pv = version["version"]
         pv = mangling.mangle_version(up_pv, options)
 
-        if up_pv.startswith('v'):
+        if up_pv.startswith("v"):
             if helpers.version_filtered(cp, ver, pv):
                 continue
         else:
@@ -148,11 +148,11 @@ def scan_pkg(pkg, options):
             if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
                 continue
 
-        url = 'mirror://cpan/authors/id/%s/%s/%s/%s' % (
-            version['cpanid'][0],
-            version['cpanid'][0:1],
-            version['cpanid'],
-            version['archive']
+        url = "mirror://cpan/authors/id/%s/%s/%s/%s" % (
+            version["cpanid"][0],
+            version["cpanid"][0:1],
+            version["cpanid"],
+            version["archive"],
         )
 
         url = mangling.mangle_url(url, options)
@@ -19,7 +19,7 @@ def can_handle(pkg, url=None):
 def scan_pkg(pkg, options):
     cp, ver, rev = portage.pkgsplit(pkg.cpv)
 
-    packages_url, package_name = options['data'].strip().split(" ", 1)
+    packages_url, package_name = options["data"].strip().split(" ", 1)
 
     output.einfo("Using Debian Packages: " + packages_url)
 
@@ -17,7 +17,7 @@ def can_handle(pkg, url=None):
 def scan_pkg(pkg, options):
     cp, ver, rev = portage.pkgsplit(pkg.cpv)
 
-    package = options['data'].strip()
+    package = options["data"].strip()
 
     output.einfo("Using FreeCode handler: " + package)
 
@@ -25,8 +25,7 @@ def scan_pkg(pkg, options):
     content = str(fp.read())
 
     result = re.findall(
-        r'<a href="/projects/%s/releases/(\d+)">([^<]+)</a>' % package,
-        content
+        r'<a href="/projects/%s/releases/(\d+)">([^<]+)</a>' % package, content
     )
 
     ret = []
@@ -34,15 +33,15 @@ def scan_pkg(pkg, options):
         pv = mangling.mangle_version(up_pv, options)
         if helpers.version_filtered(cp, ver, pv):
             continue
-        fp = urllib.request.urlopen("http://freecode.com/projects/%s/releases/%s" %
-                                    (package, release_id))
+        fp = urllib.request.urlopen(
+            "http://freecode.com/projects/%s/releases/%s" % (package, release_id)
+        )
         content = str(fp.read())
         download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]
         fp = urllib.request.urlopen("http://freecode.com%s" % download_page)
         content = str(fp.read())
         url = re.findall(
-            r'In case it doesn\'t, click here: <a href="([^"]+)"',
-            content
+            r'In case it doesn\'t, click here: <a href="([^"]+)"', content
         )[0]
         ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
     return ret
@@ -11,8 +11,15 @@ except ImportError:
 
 import portage
 
-from euscan import output, helpers, mangling, CONFIG, SCANDIR_BLACKLIST_URLS, \
-    BRUTEFORCE_BLACKLIST_PACKAGES, BRUTEFORCE_BLACKLIST_URLS
+from euscan import (
+    output,
+    helpers,
+    mangling,
+    CONFIG,
+    SCANDIR_BLACKLIST_URLS,
+    BRUTEFORCE_BLACKLIST_PACKAGES,
+    BRUTEFORCE_BLACKLIST_URLS,
+)
 
 HANDLER_NAME = "generic"
 CONFIDENCE = 45
@@ -55,7 +62,7 @@ def scan_html(data, url, pattern):
     soup = BeautifulSoup(data, features="lxml")
     results = []
 
-    for link in soup.findAll('a'):
+    for link in soup.findAll("a"):
         href = link.get("href")
         if not href:
             continue
@@ -66,11 +73,9 @@ def scan_html(data, url, pattern):
         match = re.search(pattern, href, re.I)
         if match:
             results.append(
-                (".".join([x for x in match.groups() if x is not None]),
-                 match.group(0))
+                (".".join([x for x in match.groups() if x is not None]), match.group(0))
             )
 
-
     return results
 
 
@@ -83,8 +88,7 @@ def scan_ftp(data, url, pattern):
         match = re.search(pattern, line, re.I)
         if match:
             results.append(
-                (".".join([x for x in match.groups() if x is not None]),
-                 match.group(0))
+                (".".join([x for x in match.groups() if x is not None]), match.group(0))
             )
     return results
 
@@ -116,7 +120,7 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
 
     if re.search(b"<\s*a\s+[^>]*href", data, re.I):
         results.extend(scan_html(data, url, pattern))
-    elif url.startswith('ftp://'):
+    elif url.startswith("ftp://"):
         results.extend(scan_ftp(data, url, pattern))
 
     versions = []
@@ -136,8 +140,7 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
         versions.append((path, pv, HANDLER_NAME, confidence))
 
         if steps:
-            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url,
-                                           options)
+            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url, options)
             versions.extend(ret)
 
     return versions
@@ -160,16 +163,14 @@ def scan_url(pkg, url, options):
     if ver not in resolved_url:
         newver = helpers.version_change_end_sep(ver)
         if newver and newver in resolved_url:
-            output.einfo(
-                "Version: using %s instead of %s" % (newver, ver)
-            )
+            output.einfo("Version: using %s instead of %s" % (newver, ver))
             ver = newver
 
     template = helpers.template_from_url(resolved_url, ver)
-    if '${' not in template:
+    if "${" not in template:
         output.einfo(
-            "Url doesn't seems to depend on version: %s not found in %s" %
-            (ver, resolved_url)
+            "Url doesn't seems to depend on version: %s not found in %s"
+            % (ver, resolved_url)
         )
         return []
     else:
@@ -220,10 +221,11 @@ def brute_force(pkg, url):
 
     template = helpers.template_from_url(url, ver)
 
-    if '${PV}' not in template:
+    if "${PV}" not in template:
         output.einfo(
-            "Url doesn't seems to depend on full version: %s not found in %s" %
-            (ver, url))
+            "Url doesn't seems to depend on full version: %s not found in %s"
+            % (ver, url)
+        )
         return []
     else:
         output.einfo("Brute forcing: %s" % template)
@@ -250,19 +252,15 @@ def brute_force(pkg, url):
 
         if not infos:
             continue
-        confidence = confidence_score(try_url, url,
-                                      minimum=BRUTEFORCE_CONFIDENCE)
+        confidence = confidence_score(try_url, url, minimum=BRUTEFORCE_CONFIDENCE)
         result.append([try_url, version, BRUTEFORCE_HANDLER_NAME, confidence])
 
-        if len(result) > CONFIG['brute-force-false-watermark']:
-            output.einfo(
-                "Broken server detected ! Skipping brute force."
-            )
+        if len(result) > CONFIG["brute-force-false-watermark"]:
+            output.einfo("Broken server detected ! Skipping brute force.")
             return []
 
         if CONFIG["brute-force-recursive"]:
-            for v in helpers.gen_versions(list(components),
-                                          CONFIG["brute-force"]):
+            for v in helpers.gen_versions(list(components), CONFIG["brute-force"]):
                 if v not in versions and tuple(v) not in done:
                     versions.append(v)
 
@@ -12,18 +12,18 @@ PRIORITY = 90
 
 
 def can_handle(pkg, url=None):
-    return url and url.startswith('mirror://github/')
+    return url and url.startswith("mirror://github/")
 
 
 def guess_package(cp, url):
-    match = re.search('^mirror://github/(.*?)/(.*?)/(.*)$', url)
+    match = re.search("^mirror://github/(.*?)/(.*?)/(.*)$", url)
 
-    assert(match)
+    assert match
     return (match.group(1), match.group(2), match.group(3))
 
 
 def scan_url(pkg, url, options):
-    'http://developer.github.com/v3/repos/downloads/'
+    "http://developer.github.com/v3/repos/downloads/"
 
     user, project, filename = guess_package(pkg.cpv, url)
 
@@ -35,25 +35,27 @@ def scan_url(pkg, url, options):
     # now create a filename-matching regexp
     # XXX: supposedly replace first with (?P<foo>...)
     # and remaining ones with (?P=foo)
-    fnre = re.compile('^%s$' % \
-        re.escape(filename).replace(re.escape(ver), '(.*?)'))
+    fnre = re.compile("^%s$" % re.escape(filename).replace(re.escape(ver), "(.*?)"))
 
-    output.einfo("Using github API for: project=%s user=%s filename=%s" % \
-        (project, user, filename))
+    output.einfo(
+        "Using github API for: project=%s user=%s filename=%s"
+        % (project, user, filename)
+    )
 
-    dlreq = urllib.request.urlopen('https://api.github.com/repos/%s/%s/downloads' % \
-        (user, project))
+    dlreq = urllib.request.urlopen(
+        "https://api.github.com/repos/%s/%s/downloads" % (user, project)
+    )
     dls = json.load(dlreq)
 
     ret = []
     for dl in dls:
-        m = fnre.match(dl['name'])
+        m = fnre.match(dl["name"])
 
         if m:
             pv = mangling.mangle_version(m.group(1), options)
             if helpers.version_filtered(cp, ver, pv):
                 continue
 
-            url = mangling.mangle_url(dl['html_url'], options)
+            url = mangling.mangle_url(dl["html_url"], options)
             ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
     return ret
@@ -16,15 +16,15 @@ HANDLER_NAME = "gnome"
 CONFIDENCE = 100
 PRIORITY = 90
 
-GNOME_URL_SOURCE = 'http://ftp.gnome.org/pub/GNOME/sources'
+GNOME_URL_SOURCE = "http://ftp.gnome.org/pub/GNOME/sources"
 
 
 def can_handle(_pkg, url=None):
-    return url and url.startswith('mirror://gnome/')
+    return url and url.startswith("mirror://gnome/")
 
 
 def guess_package(cp, url):
-    match = re.search('mirror://gnome/sources/([^/]+)/.*', url)
+    match = re.search("mirror://gnome/sources/([^/]+)/.*", url)
     if match:
         return match.group(1)
 
@@ -34,27 +34,27 @@ def guess_package(cp, url):
 
 
 def scan_url(pkg, url, options):
-    'http://ftp.gnome.org/pub/GNOME/sources/'
+    "http://ftp.gnome.org/pub/GNOME/sources/"
     package = {
-        'data': guess_package(pkg.cpv, url),
-        'type': 'gnome',
+        "data": guess_package(pkg.cpv, url),
+        "type": "gnome",
     }
     return scan_pkg(pkg, package)
 
 
 def scan_pkg(pkg, options):
-    package = options['data']
+    package = options["data"]
 
     output.einfo("Using Gnome json cache: " + package)
 
-    fp = urllib.request.urlopen('/'.join([GNOME_URL_SOURCE, package, 'cache.json']))
+    fp = urllib.request.urlopen("/".join([GNOME_URL_SOURCE, package, "cache.json"]))
     content = fp.read()
     fp.close()
 
-    cache = json.loads(content, encoding='ascii')
+    cache = json.loads(content, encoding="ascii")
 
     if cache[0] != 4:
-        output.eerror('Unknow cache format detected')
+        output.eerror("Unknow cache format detected")
         return []
 
     versions = cache[2][package]
@@ -72,13 +72,12 @@ def scan_pkg(pkg, options):
         if helpers.version_filtered(cp, ver, pv):
             continue
         up_files = cache[1][package][up_pv]
-        for tarball_comp in ('tar.xz', 'tar.bz2', 'tar.gz'):
+        for tarball_comp in ("tar.xz", "tar.bz2", "tar.gz"):
             if tarball_comp in up_files:
-                url = '/'.join([GNOME_URL_SOURCE, package,
-                                up_files[tarball_comp]])
+                url = "/".join([GNOME_URL_SOURCE, package, up_files[tarball_comp]])
                 break
         else:
-            output.ewarn('No tarball for release %s' % up_pv)
+            output.ewarn("No tarball for release %s" % up_pv)
         ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
 
     return ret
@@ -23,6 +23,7 @@ def can_handle(pkg, url=None):
 
     return re.match(package_name_regex, url)
 
+
 def scan_url(pkg, url, options):
     output.einfo("Using Google Code handler")
 
@@ -31,9 +32,7 @@ def scan_url(pkg, url, options):
     package_name = re.match(package_name_regex, url).group(1)
     base_url = "http://code.google.com/p/%s/downloads/list" % package_name
 
-    file_pattern = regex_from_template(
-        url.split("/")[-1].replace(ver, "${PV}")
-    )
+    file_pattern = regex_from_template(url.split("/")[-1].replace(ver, "${PV}"))
 
     result = url_scan(pkg, base_url, file_pattern)
 
@@ -6,14 +6,14 @@ HANDLER_NAME = "kde"
 
 
 def can_handle(pkg, url):
-    return url and url.startswith('mirror://kde/')
+    return url and url.startswith("mirror://kde/")
 
 
 def clean_results(results):
     ret = []
 
     for path, version, _, confidence in results:
-        if version == '5SUMS':
+        if version == "5SUMS":
             continue
         ret.append((path, version, HANDLER_NAME, confidence))
 
@@ -23,16 +23,15 @@ def clean_results(results):
 def scan_url(pkg, url, options):
     results = generic.scan(pkg.cpv, url)
 
-    if generic.startswith('mirror://kde/unstable/'):
-        url = generic.replace('mirror://kde/unstable/', 'mirror://kde/stable/')
+    if generic.startswith("mirror://kde/unstable/"):
+        url = generic.replace("mirror://kde/unstable/", "mirror://kde/stable/")
         results += generic.scan(pkg.cpv, url)
 
     if not results:  # if nothing was found go brute forcing
         results = generic.brute_force(pkg.cpv, url)
 
-        if generic.startswith('mirror://kde/unstable/'):
-            url = generic.replace('mirror://kde/unstable/',
-                                  'mirror://kde/stable/')
+        if generic.startswith("mirror://kde/unstable/"):
+            url = generic.replace("mirror://kde/unstable/", "mirror://kde/stable/")
             results += generic.brute_force(pkg.cpv, url)
 
     return clean_results(results)
@@ -6,7 +6,8 @@ PRIORITY = 90
 
 
 def can_handle(pkg, url=None):
-    return url and url.startswith('http://%s.php.net/get/' % HANDLER_NAME)
+    return url and url.startswith("http://%s.php.net/get/" % HANDLER_NAME)
 
+
 scan_url = php.scan_url
 scan_pkg = php.scan_pkg
@@ -4,8 +4,10 @@ HANDLER_NAME = "pecl"
 CONFIDENCE = 100
 PRIORITY = 90
 
+
 def can_handle(pkg, url=None):
-    return url and url.startswith('http://%s.php.net/get/' % HANDLER_NAME)
+    return url and url.startswith("http://%s.php.net/get/" % HANDLER_NAME)
 
+
 scan_url = php.scan_url
 scan_pkg = php.scan_pkg
@@ -9,11 +9,13 @@ HANDLER_NAME = "php"
 CONFIDENCE = 100
 PRIORITY = 90
 
+
 def can_handle(pkg, url=None):
     return False
 
+
 def guess_package_and_channel(cp, url):
-    match = re.search('http://(.*)\.php\.net/get/(.*)-(.*).tgz', url)
+    match = re.search("http://(.*)\.php\.net/get/(.*)-(.*).tgz", url)
 
     if match:
         host = match.group(1)
@@ -26,15 +28,16 @@ def guess_package_and_channel(cp, url):
 
 def scan_url(pkg, url, options):
     package, channel = guess_package_and_channel(pkg.cp, url)
-    return scan_pkg(pkg, {'type' : channel, 'data' : package })
+    return scan_pkg(pkg, {"type": channel, "data": package})
 
+
 def scan_pkg(pkg, options):
     cp, ver, rev = pkg.cp, pkg.version, pkg.revision
 
-    package = options['data']
-    channel = options['type']
+    package = options["data"]
+    channel = options["type"]
 
-    url = 'http://%s.php.net/rest/r/%s/allreleases.xml' % (channel, package.lower())
+    url = "http://%s.php.net/rest/r/%s/allreleases.xml" % (channel, package.lower())
 
     output.einfo("Using: " + url)
 
@@ -61,7 +64,7 @@ def scan_pkg(pkg, options):
         if helpers.version_filtered(cp, ver, pv):
             continue
 
-        url = 'http://%s.php.net/get/%s-%s.tgz' % (channel, package, up_pv)
+        url = "http://%s.php.net/get/%s-%s.tgz" % (channel, package, up_pv)
         url = mangling.mangle_url(url, options)
 
         ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
@@ -11,11 +11,11 @@ PRIORITY = 90
 
 
 def can_handle(pkg, url=None):
-    return url and url.startswith('mirror://pypi/')
+    return url and url.startswith("mirror://pypi/")
 
 
 def guess_package(cp, url):
-    match = re.search('mirror://pypi/\w+/(.*)/.*', url)
+    match = re.search("mirror://pypi/\w+/(.*)/.*", url)
     if match:
         return match.group(1)
 
@@ -25,18 +25,18 @@ def guess_package(cp, url):
 
 
 def scan_url(pkg, url, options):
-    'http://wiki.python.org/moin/PyPiXmlRpc'
+    "http://wiki.python.org/moin/PyPiXmlRpc"
 
     package = guess_package(pkg.cpv, url)
-    return scan_pkg(pkg, {'data': package})
+    return scan_pkg(pkg, {"data": package})
 
 
 def scan_pkg(pkg, options):
-    package = options['data']
+    package = options["data"]
 
     output.einfo("Using PyPi XMLRPC: " + package)
 
-    client = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
+    client = xmlrpc.client.ServerProxy("https://pypi.python.org/pypi")
     versions = client.package_releases(package)
 
     if not versions:
@@ -52,7 +52,6 @@ def scan_pkg(pkg, options):
         if helpers.version_filtered(cp, ver, pv):
             continue
         urls = client.release_urls(package, up_pv)
-        urls = " ".join([mangling.mangle_url(infos['url'], options)
-                         for infos in urls])
+        urls = " ".join([mangling.mangle_url(infos["url"], options) for infos in urls])
         ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
     return ret
@@ -11,13 +11,13 @@ PRIORITY = 90
 
 
 def can_handle(pkg, url=None):
-    return url and url.startswith('mirror://rubygems/')
+    return url and url.startswith("mirror://rubygems/")
 
 
 def guess_gem(cpv, url):
-    match = re.search('mirror://rubygems/(.*).gem', url)
+    match = re.search("mirror://rubygems/(.*).gem", url)
     if match:
-        cpv = 'fake/%s' % match.group(1)
+        cpv = "fake/%s" % match.group(1)
 
     ret = portage.pkgsplit(cpv)
     if not ret:
@@ -30,23 +30,22 @@ def guess_gem(cpv, url):
 
 
 def scan_url(pkg, url, options):
-    'http://guides.rubygems.org/rubygems-org-api/#gemversion'
+    "http://guides.rubygems.org/rubygems-org-api/#gemversion"
 
     gem = guess_gem(pkg.cpv, url)
 
     if not gem:
-        output.eerror("Can't guess gem name using %s and %s" % \
-                      (pkg.cpv, url))
+        output.eerror("Can't guess gem name using %s and %s" % (pkg.cpv, url))
         return []
 
     output.einfo("Using RubyGem API: %s" % gem)
 
-    return scan_pkg(pkg, {'data': gem})
+    return scan_pkg(pkg, {"data": gem})
 
 
 def scan_pkg(pkg, options):
-    gem = options['data']
-    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem
+    gem = options["data"]
+    url = "http://rubygems.org/api/v1/versions/%s.json" % gem
 
     try:
         fp = helpers.urlopen(url)
@@ -65,11 +64,11 @@ def scan_pkg(pkg, options):
 
     ret = []
     for version in versions:
-        up_pv = version['number']
+        up_pv = version["number"]
         pv = mangling.mangle_version(up_pv, options)
         if helpers.version_filtered(cp, ver, pv):
             continue
-        url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, up_pv)
+        url = "http://rubygems.org/gems/%s-%s.gem" % (gem, up_pv)
         url = mangling.mangle_url(url, options)
         ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
     return ret
@@ -28,14 +28,11 @@ def scan_url(pkg, url, options):
     cp, ver, rev = portage.pkgsplit(pkg.cpv)
 
     project, filename = re.search(
-        "mirror://sourceforge/([^/]+)/(?:.*/)?([^/]+)",
-        url
+        "mirror://sourceforge/([^/]+)/(?:.*/)?([^/]+)", url
     ).groups()
 
     base_url = "http://qa.debian.org/watch/sf.php/%s" % project
-    file_pattern = regex_from_template(
-        filename.replace(ver, "${PV}")
-    )
+    file_pattern = regex_from_template(filename.replace(ver, "${PV}"))
 
     result = url_scan(pkg, base_url, file_pattern)
 
@@ -33,7 +33,7 @@ def handle_directory_patterns(base, file_pattern):
         i += 1
     basedir = "/".join(basedir)
     directory_pattern = splitted[i]
-    final = "/".join(splitted[i + 1:])
+    final = "/".join(splitted[i + 1 :])
 
     try:
         fp = helpers.urlopen(basedir)
@@ -52,15 +52,14 @@ def handle_directory_patterns(base, file_pattern):
     else:
         scan_data = generic.scan_html(data, basedir, directory_pattern)
 
-    return [("/".join((basedir, path, final)), file_pattern)
-            for _, path in scan_data]
+    return [("/".join((basedir, path, final)), file_pattern) for _, path in scan_data]
 
 
 def read_options(options):
     try:
-        base, file_pattern = options['data'].split(" ")[:2]
+        base, file_pattern = options["data"].split(" ")[:2]
     except ValueError:
-        base, file_pattern = options['data'], None
+        base, file_pattern = options["data"], None
 
     # the file pattern can be in the base url
     pattern_regex = r"/([^/]*\([^/]*\)[^/]*)$"
@@ -70,9 +69,7 @@ def read_options(options):
         base = base.replace(file_pattern, "")
 
     # handle sf.net specially
-    base = base.replace(
-        "http://sf.net/", "http://qa.debian.org/watch/sf.php/"
-    )
+    base = base.replace("http://sf.net/", "http://qa.debian.org/watch/sf.php/")
 
     return base, file_pattern
 