Compare commits
No commits in common. "443b5f62fd261eeda660802d6a90e2e9b4a59901" and "eedf3c5939b969bbff851464fc3192be2a2c4ffc" have entirely different histories.
443b5f62fd ... eedf3c5939

bin/euscan (18 changes)
@@ -199,7 +199,7 @@ def print_usage(_error=None, help=None):
     #     turquoise("`man %s`" % __productname__), file=out)
 
 
-class ParseArgsError(Exception):
+class ParseArgsException(Exception):
     """For parseArgs() -> main() communications."""
 
     def __init__(self, value):
@@ -220,9 +220,9 @@ def parse_args():
     return_code = True
     for o, a in opts:
         if o in ("-h", "--help"):
-            raise ParseArgsError("help")
+            raise ParseArgsException("help")
         elif o in ("-V", "--version"):
-            raise ParseArgsError("version")
+            raise ParseArgsException("version")
         elif o in ("-C", "--nocolor"):
             CONFIG["nocolor"] = True
             pp.output.nocolor()
@@ -283,14 +283,14 @@ def parse_args():
     # apply getopts to command line, show partial help on failure
     try:
         opts, args = getopt.getopt(sys.argv[1:], short_opts, long_opts)
-    except getopt.GetoptError as exc:
-        raise ParseArgsError(opts_mode + "-options") from exc
+    except getopt.GetoptError:
+        raise ParseArgsException(opts_mode + "-options")
 
     # set options accordingly
     option_switch(opts)
 
     if len(args) < 1:
-        raise ParseArgsError("packages")
+        raise ParseArgsException("packages")
 
     return args
 
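Note: the `-` side of the hunk above chains the re-raised error with `from exc`, which the `+` side drops. Chaining stores the original exception in `__cause__`, so both tracebacks are reported. A minimal sketch of the difference; the argument values are illustrative, not taken from the diff:

import getopt

class ParseArgsError(Exception):
    pass

def parse(argv):
    try:
        opts, args = getopt.getopt(argv, "h", ["help"])
    except getopt.GetoptError as exc:
        # "raise ... from exc" records the GetoptError in __cause__,
        # so the traceback shows both errors instead of only this one.
        raise ParseArgsError("global-options") from exc

try:
    parse(["--bogus"])
except ParseArgsError as err:
    print(repr(err.__cause__))  # the original GetoptError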
@@ -306,7 +306,7 @@ def main():
     # parse command line options and actions
     try:
         queries = parse_args()
-    except ParseArgsError as e:
+    except ParseArgsException as e:
         if e.value == "help":
             print_usage(help="all")
             exit_helper(0)
@@ -362,7 +362,7 @@ def main():
             exit_helper(1)
 
         except GentoolkitException as err:
-            output.eerror(f"{query}: {str(err)}")
+            output.eerror("%s: %s" % (query, str(err)))
             exit_helper(1)
 
         except Exception as err:
@@ -372,7 +372,7 @@ def main():
             traceback.print_exc(file=sys.stderr)
             print("-" * 60)
 
-            output.eerror(f"{query}: {str(err)}")
+            output.eerror("%s: %s" % (query, str(err)))
             exit_helper(1)
 
     if not ret and not CONFIG["quiet"]:
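Most of the remaining hunks in this compare swap one string-formatting style for the other. The two spellings are equivalent at runtime; a quick demonstration, with an illustrative package name:

query = "app-portage/euscan"
err = ValueError("boom")

assert f"{query}: {str(err)}" == "%s: %s" % (query, str(err))
# both yield "app-portage/euscan: boom"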
@@ -30,10 +30,10 @@ def guess_indent_values(before):
     def guess_for_tags(tags):
         for tag in tags:
             for i in [0, 2, 4, 6, 8, 12, 16]:
-                if f"\n{' ' * i}<{tag}" in before:
+                if "\n%s<%s" % (" " * i, tag) in before:
                     return i, False
             for i in [0, 1, 2]:
-                if f"\n{'\t' * i}<{tag}" in before:
+                if "\n%s<%s" % ("\t" * i, tag) in before:
                     return i, True
         return -1, False
 
@@ -119,11 +119,11 @@ def get_deb_url(name):
     content = opened.read()
 
     for link in BeautifulSoup(content, parseOnlyThese=SoupStrainer("a")):
-        if re.match(r"[^\s]+\.debian\.tar\.(?:gz|bz2)", link.text):
+        if re.match("[^\s]+\.debian\.tar\.(?:gz|bz2)", link.text):
             deb_url = link["href"]
             deb_type = "source"
             break
-        if re.match(r"[^\s]+\.diff\.gz", link.text):
+        if re.match("[^\s]+\.diff\.gz", link.text):
             deb_url = link["href"]
             deb_type = "diff"
             break
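The other change running through these hunks is the `r` prefix on regex literals. Escapes such as `\s` and `\.` mean nothing to the plain-string parser; they only survive through the unknown-escape fallback, which has been deprecated since Python 3.6 (a SyntaxWarning since 3.12). Raw strings sidestep that entirely:

import re

pattern = r"[^\s]+\.diff\.gz"      # raw: \s and \. reach re verbatim
assert re.match(pattern, "foo.diff.gz")
assert re.match(pattern, "foo_diff_gz") is None  # \. is a literal dot

assert "\s" == r"\s"   # today's fallback passes unknown escapes through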
@@ -157,7 +157,7 @@ def patch_metadata(package, watch_data, diff=False):
     for watch_line in watch_data.split("\n"):  # there can be multiple lines
         watch_line = " ".join(watch_line.split())  # remove extra spaces and \n
 
-        version_parse = re.match(r"version=(\d+?)", watch_line)
+        version_parse = re.match("version=(\d+?)", watch_line)
         if version_parse:
             version = version_parse.group(1)
             continue
@@ -180,7 +180,7 @@ def patch_metadata(package, watch_data, diff=False):
             if opt_name in valid:
                 if opt_name == "uversionmangle":
                     opt_name = "versionmangle"
-                cleaned_opts.append(f'{opt_name}="{opt_value}"')
+                cleaned_opts.append('%s="%s"' % (opt_name, opt_value))
         opts = " ".join(cleaned_opts)
 
         # clean url from useless stuff. Just keep <base> [<filepattern>]
@@ -188,9 +188,14 @@ def patch_metadata(package, watch_data, diff=False):
         url = " ".join([x for x in url_search.groups() if x is not None])
 
         if opts:
-            watch_tag = f'{indent}<watch version="{version}" {opts}>{url}</watch>'
+            watch_tag = '%s<watch version="%s" %s>%s</watch>' % (
+                indent,
+                version,
+                opts,
+                url,
+            )
         else:
-            watch_tag = f'{indent}<watch version="{version}">{url}</watch>'
+            watch_tag = '%s<watch version="%s">%s</watch>' % (indent, version, url)
         watch_tags.append(watch_tag)
 
     watch_tags = "\n".join(watch_tags)
@@ -198,7 +203,11 @@ def patch_metadata(package, watch_data, diff=False):
     if "<upstream>" in data:
         data = data.replace("<upstream>", "<upstream>\n%s" % watch_tags, 1)
     else:
-        rep = f"{rindent}<upstream>\n{watch_tags}\n{rindent}</upstream>\n</pkgmetadata>"
+        rep = "%s<upstream>\n%s\n%s</upstream>\n</pkgmetadata>" % (
+            rindent,
+            watch_tags,
+            rindent,
+        )
         data = data.replace("</pkgmetadata>", rep, 1)
 
     if not diff:
@@ -40,6 +40,3 @@ src_paths = ["bin/euscan", "src/euscan/"]
 
 [tool.ruff]
 extend-include = ["bin/euscan", "bin/euscan_patch_metadata"]
-
-[tool.ruff.lint]
-extend-select = ["B", "E", "N", "UP", "W"]
@@ -74,13 +74,13 @@ BRUTEFORCE_BLACKLIST_URLS = [
 ROBOTS_TXT_BLACKLIST_DOMAINS = [
     "(.*)sourceforge(.*)",
     "(.*)github.com",
-    r"(.*)qt\.nokia\.com(.*)",
-    r"(.*)chromium\.org(.*)",
-    r"(.*)nodejs\.org(.*)",
-    r"(.*)download\.mono-project\.com(.*)",
-    r"(.*)fedorahosted\.org(.*)",
-    r"(.*)download\.tuxfamily\.org(.*)",
-    r"(.*)festvox\.org(.*)",
+    "(.*)qt\.nokia\.com(.*)",
+    "(.*)chromium\.org(.*)",
+    "(.*)nodejs\.org(.*)",
+    "(.*)download\.mono-project\.com(.*)",
+    "(.*)fedorahosted\.org(.*)",
+    "(.*)download\.tuxfamily\.org(.*)",
+    "(.*)festvox\.org(.*)",
 ]
 
 from euscan.out import EuscanOutput  # noqa: E402
@@ -71,7 +71,7 @@ def package_from_ebuild(ebuild):
         return False
 
     ebuild_split = ebuild.split("/")
-    cpv = f"{ebuild_split[-3]}/{pf}"
+    cpv = "%s/%s" % (ebuild_split[-3], pf)
 
     if not portage.catpkgsplit(cpv):
         return False
@@ -13,7 +13,7 @@ from euscan import CONFIG, output
 handlers = {"package": [], "url": [], "all": {}}
 
 # autoimport all modules in this directory and append them to handlers list
-for loader, module_name, _is_pkg in pkgutil.walk_packages(__path__):
+for loader, module_name, is_pkg in pkgutil.walk_packages(__path__):
     module = loader.find_spec(module_name).loader.load_module(module_name)
     if not hasattr(module, "HANDLER_NAME"):
         continue
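Both sides of this hunk keep the `find_spec(...).loader.load_module(...)` call, although `load_module` has been deprecated since Python 3.4. A sketch of the same autoimport walk using `importlib` instead; `load_handlers` and its parameters are hypothetical names for illustration, not part of euscan:

import importlib
import pkgutil

def load_handlers(package_path, package_name):
    # Walk every module under the package and keep those that
    # declare HANDLER_NAME, mirroring the loop in this hunk.
    found = []
    for _finder, name, _is_pkg in pkgutil.walk_packages(package_path):
        module = importlib.import_module("%s.%s" % (package_name, name))
        if hasattr(module, "HANDLER_NAME"):
            found.append(module)
    return found

# e.g. load_handlers(euscan.handlers.__path__, "euscan.handlers")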
@@ -157,7 +157,7 @@ def scan_url(pkg, urls, options, on_progress=None):
             else:
                 output.eerror("Can't find a suitable handler!")
         except Exception as e:
-            output.ewarn(f"Handler failed: [{e.__class__.__name__}] {str(e)}")
+            output.ewarn("Handler failed: [%s] %s" % (e.__class__.__name__, str(e)))
 
         if versions and CONFIG["oneshot"]:
             break
@@ -81,7 +81,7 @@ def mangle_version(up_pv):
     pv = ".".join(groups)
 
     if rc_part:
-        pv = f"{pv}_rc{rc_part}"
+        pv = "%s_rc%s" % (pv, rc_part)
 
     return pv
 
@@ -128,7 +128,7 @@ def scan_pkg(pkg, options):
         fp = helpers.urlopen(url)
     except urllib.error.URLError:
         return []
-    except OSError:
+    except IOError:
         return []
 
     if not fp:
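The `OSError`/`IOError` swaps in this and the following hunks are behaviour-neutral: `IOError` has been a plain alias of `OSError` since Python 3.3. Clause order is what actually matters here, since `urllib.error.URLError` is itself an `OSError` subclass:

import urllib.error

assert IOError is OSError
assert issubclass(urllib.error.URLError, OSError)

# Hence "except urllib.error.URLError:" must precede "except OSError:"
# (or "except IOError:"), or the broader clause would swallow it.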
@@ -157,7 +157,13 @@ def scan_pkg(pkg, options):
             if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
                 continue
 
-            url = f"mirror://cpan/authors/id/{version['cpanid'][0]}/{version['cpanid'][0:1]}/{version['cpanid']}/{version['archive']}"
+            url = "mirror://cpan/authors/id/%s/%s/%s/%s" % (
+                version["cpanid"][0],
+                version["cpanid"][0:1],
+                version["cpanid"],
+                version["archive"],
+            )
 
             url = mangling.mangle_url(url, options)
             ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
@ -40,7 +40,7 @@ def scan_pkg(pkg, options):
|
|||||||
if helpers.version_filtered(cp, ver, pv):
|
if helpers.version_filtered(cp, ver, pv):
|
||||||
continue
|
continue
|
||||||
fp = urllib.request.urlopen(
|
fp = urllib.request.urlopen(
|
||||||
f"http://freecode.com/projects/{package}/releases/{release_id}"
|
"http://freecode.com/projects/%s/releases/%s" % (package, release_id)
|
||||||
)
|
)
|
||||||
content = str(fp.read())
|
content = str(fp.read())
|
||||||
download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]
|
download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]
|
||||||
|
@ -112,7 +112,7 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
|
|||||||
fp = helpers.urlopen(url)
|
fp = helpers.urlopen(url)
|
||||||
except urllib.error.URLError:
|
except urllib.error.URLError:
|
||||||
return []
|
return []
|
||||||
except OSError:
|
except IOError:
|
||||||
return []
|
return []
|
||||||
|
|
||||||
if not fp:
|
if not fp:
|
||||||
@ -122,7 +122,7 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
|
|||||||
|
|
||||||
results = []
|
results = []
|
||||||
|
|
||||||
if re.search(rb"<\s*a\s+[^>]*href", data, re.I):
|
if re.search(b"<\s*a\s+[^>]*href", data, re.I):
|
||||||
results.extend(scan_html(data, url, pattern))
|
results.extend(scan_html(data, url, pattern))
|
||||||
elif url.startswith("ftp://"):
|
elif url.startswith("ftp://"):
|
||||||
results.extend(scan_ftp(data, url, pattern))
|
results.extend(scan_ftp(data, url, pattern))
|
||||||
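The `rb"..."` literal above combines both recurring fixes: the pattern must be bytes because the fetched `data` is bytes, and raw because of the `\s` escapes. The `data` value below is illustrative:

import re

data = b'<html><A HREF="pkg-1.0.tar.gz">pkg</A></html>'

assert re.search(rb"<\s*a\s+[^>]*href", data, re.I)   # bytes pattern, bytes data

try:
    re.search(r"<\s*a\s+[^>]*href", data, re.I)       # str pattern, bytes data
except TypeError as exc:
    print(exc)  # cannot use a string pattern on a bytes-like object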
@@ -154,7 +154,7 @@ def scan_url(pkg, url, options):
     if CONFIG["scan-dir"]:
         for bu in SCANDIR_BLACKLIST_URLS:
             if re.match(bu, url):
-                output.einfo(f"{url} is blacklisted by rule {bu}")
+                output.einfo("%s is blacklisted by rule %s" % (url, bu))
                 return []
 
     resolved_url = helpers.parse_mirror(url)
@@ -167,15 +167,14 @@ def scan_url(pkg, url, options):
     if ver not in resolved_url:
         newver = helpers.version_change_end_sep(ver)
         if newver and newver in resolved_url:
-            output.einfo(f"Version: using {newver} instead of {ver}")
+            output.einfo("Version: using %s instead of %s" % (newver, ver))
             ver = newver
 
     template = helpers.template_from_url(resolved_url, ver)
     if "${" not in template:
         output.einfo(
-            "Url doesn't seems to depend on version: {} not found in {}".format(
-                ver, resolved_url
-            )
+            "Url doesn't seems to depend on version: %s not found in %s"
+            % (ver, resolved_url)
         )
         return []
     else:
@@ -202,12 +201,12 @@ def brute_force(pkg, url):
 
     for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
         if re.match(bp, cp):
-            output.einfo(f"{cp} is blacklisted by rule {bp}")
+            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
             return []
 
     for bp in BRUTEFORCE_BLACKLIST_URLS:
         if re.match(bp, url):
-            output.einfo(f"{cp} is blacklisted by rule {bp}")
+            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
             return []
 
     output.einfo("Generating version from " + ver)
@@ -228,7 +227,8 @@ def brute_force(pkg, url):
 
     if "${PV}" not in template:
         output.einfo(
-            f"Url doesn't seems to depend on full version: {ver} not found in {url}"
+            "Url doesn't seems to depend on full version: %s not found in %s"
+            % (ver, url)
         )
         return []
     else:
@@ -44,11 +44,12 @@ def scan_url(pkg, url, options):
     fnre = re.compile("^%s$" % re.escape(filename).replace(re.escape(ver), "(.*?)"))
 
     output.einfo(
-        f"Using github API for: project={project} user={user} filename={filename}"
+        "Using github API for: project=%s user=%s filename=%s"
+        % (project, user, filename)
     )
 
     dlreq = urllib.request.urlopen(
-        f"https://api.github.com/repos/{user}/{project}/downloads"
+        "https://api.github.com/repos/%s/%s/downloads" % (user, project)
     )
     dls = json.load(dlreq)
 
@@ -20,7 +20,7 @@ def can_handle(pkg, url=None):
 
 
 def guess_package_and_channel(cp, url):
-    match = re.search(r"http://(.*)\.php\.net/get/(.*)-(.*).tgz", url)
+    match = re.search("http://(.*)\.php\.net/get/(.*)-(.*).tgz", url)
 
     if match:
         host = match.group(1)
@@ -42,7 +42,7 @@ def scan_pkg(pkg, options):
     package = options["data"]
     channel = options["type"]
 
-    url = f"http://{channel}.php.net/rest/r/{package.lower()}/allreleases.xml"
+    url = "http://%s.php.net/rest/r/%s/allreleases.xml" % (channel, package.lower())
 
     output.einfo("Using: " + url)
 
@@ -50,7 +50,7 @@ def scan_pkg(pkg, options):
         fp = helpers.urlopen(url)
     except urllib.error.URLError:
         return []
-    except OSError:
+    except IOError:
         return []
 
     if not fp:
@@ -69,7 +69,7 @@ def scan_pkg(pkg, options):
         if helpers.version_filtered(cp, ver, pv):
             continue
 
-        url = f"http://{channel}.php.net/get/{package}-{up_pv}.tgz"
+        url = "http://%s.php.net/get/%s-%s.tgz" % (channel, package, up_pv)
         url = mangling.mangle_url(url, options)
 
         ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
@@ -19,7 +19,7 @@ def can_handle(pkg, url=None):
 
 
 def guess_package(cp, url):
-    match = re.search(r"mirror://pypi/\w+/(.*)/.*", url)
+    match = re.search("mirror://pypi/\w+/(.*)/.*", url)
     if match:
         return match.group(1)
 
@@ -42,7 +42,7 @@ def scan_url(pkg, url, options):
     gem = guess_gem(pkg.cpv, url)
 
     if not gem:
-        output.eerror(f"Can't guess gem name using {pkg.cpv} and {url}")
+        output.eerror("Can't guess gem name using %s and %s" % (pkg.cpv, url))
         return []
 
     output.einfo("Using RubyGem API: %s" % gem)
@@ -58,7 +58,7 @@ def scan_pkg(pkg, options):
         fp = helpers.urlopen(url)
     except urllib.error.URLError:
         return []
-    except OSError:
+    except IOError:
         return []
 
     if not fp:
@@ -75,7 +75,7 @@ def scan_pkg(pkg, options):
         pv = mangling.mangle_version(up_pv, options)
         if helpers.version_filtered(cp, ver, pv):
             continue
-        url = f"http://rubygems.org/gems/{gem}-{up_pv}.gem"
+        url = "http://rubygems.org/gems/%s-%s.gem" % (gem, up_pv)
         url = mangling.mangle_url(url, options)
         ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
     return ret
@@ -24,7 +24,7 @@ def can_handle(*args):
 
 
 def handle_directory_patterns(base, file_pattern):
-    r"""
+    """
     Directory pattern matching
     e.g.: base: ftp://ftp.nessus.org/pub/nessus/nessus-([\d\.]+)/src/
           file_pattern: nessus-core-([\d\.]+)\.tar\.gz
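Dropping the `r` prefix from this docstring is the one raw-string change applied to a docstring rather than a regex literal. The prefix matters because the docstring body contains `([\d\.]+)`; in a plain string those escapes again depend on the deprecated fallback. A compact illustration:

def documented():
    r"""e.g. file_pattern: nessus-core-([\d\.]+)\.tar\.gz"""

assert r"\d" == "\\d"   # raw literal: backslash kept verbatim
assert "\t" != r"\t"    # plain literal: recognized escapes are translated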
@@ -45,7 +45,7 @@ def handle_directory_patterns(base, file_pattern):
         fp = helpers.urlopen(basedir)
     except urllib.error.URLError:
         return []
-    except OSError:
+    except IOError:
         return []
 
     if not fp:
@@ -83,7 +83,7 @@ def version_is_nightly(a, b):
 
 def version_blacklisted(cp, version):
     rule = None
-    cpv = f"{cp}-{version}"
+    cpv = "%s-%s" % (cp, version)
 
     # Check that the generated cpv can be used by portage
     if not portage.versions.catpkgsplit(cpv):
@@ -92,9 +92,10 @@ def version_blacklisted(cp, version):
     for bv in BLACKLIST_VERSIONS:
         if dep.match_from_list(bv, [cpv]):
             rule = bv
+            None
 
     if rule:
-        euscan.output.einfo(f"{cpv} is blacklisted by rule {rule}")
+        euscan.output.einfo("%s is blacklisted by rule %s" % (cpv, rule))
     return rule is not None
 
 
@@ -222,7 +223,7 @@ def gen_versions(components, level):
 
     for i in range(n, n - level, -1):
         increment_version(components, i - 1)
-        for _j in range(depth):
+        for j in range(depth):
             versions.append(list(components))
         increment_version(components, i - 1)
 
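The `_j` rename here (like `_is_pkg` earlier) is cosmetic but linter-driven: a leading underscore marks a loop variable the body never reads, which satisfies Ruff's B007 check (enabled through the `"B"` selector in the pyproject.toml hunk above). For instance:

versions, components, depth = [], [1, 0], 2

for _j in range(depth):           # "_j": counter intentionally unused
    versions.append(list(components))

assert len(versions) == depth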
@@ -263,7 +264,7 @@ def urlallowed(url):
     if protocol == "ftp":
         return True
 
-    baseurl = f"{protocol}://{domain}"
+    baseurl = "%s://%s" % (protocol, domain)
     robotsurl = urllib.parse.urljoin(baseurl, "robots.txt")
 
     if baseurl in rpcache:
@@ -279,7 +280,7 @@ def urlallowed(url):
         try:
             rp.read()
             rpcache[baseurl] = rp
-        except OSError:
+        except IOError:
             rp = None
 
     setdefaulttimeout(timeout)
@@ -289,7 +290,7 @@ def urlallowed(url):
 
 def urlopen(url, timeout=None, verb="GET"):
     if not urlallowed(url):
-        euscan.output.einfo(f"Url '{url}' blocked by robots.txt")
+        euscan.output.einfo("Url '%s' blocked by robots.txt" % url)
         return None
 
     if not timeout:
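`urlallowed()` builds on the stdlib robots.txt parser and caches one parser per base URL, as the hunks above show. A condensed sketch of that flow under stated assumptions: the cache dict, user agent, and allow-on-failure policy are illustrative, not necessarily euscan's exact behaviour:

import urllib.parse
import urllib.robotparser

rpcache = {}  # assumed cache: base URL -> parsed robots.txt

def urlallowed_sketch(url, agent="euscan"):
    parts = urllib.parse.urlparse(url)
    if parts.scheme == "ftp":
        return True                      # robots.txt applies to HTTP only
    baseurl = "%s://%s" % (parts.scheme, parts.netloc)
    rp = rpcache.get(baseurl)
    if rp is None:
        rp = urllib.robotparser.RobotFileParser()
        rp.set_url(urllib.parse.urljoin(baseurl, "robots.txt"))
        try:
            rp.read()                    # fetches and parses robots.txt
            rpcache[baseurl] = rp
        except OSError:
            return True                  # unreachable robots.txt: allow
    return rp.can_fetch(agent, url)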
@@ -369,7 +370,7 @@ def tryurl(fileurl, template):
 
     except urllib.error.URLError:
         result = None
-    except OSError:
+    except IOError:
         result = None
 
     euscan.output.eend(errno.ENOENT if not result else 0)
@@ -382,9 +383,9 @@ def regex_from_template(template):
     regexp = re.escape(template)
 
     # Unescape specific stuff
-    regexp = regexp.replace(r"\$\{", "${")
-    regexp = regexp.replace(r"\}", "}")
-    regexp = regexp.replace(r"}\.$", "}.$")
+    regexp = regexp.replace("\$\{", "${")
+    regexp = regexp.replace("\}", "}")
+    regexp = regexp.replace("}\.$", "}.$")
 
     # Replace ${\d+}
     # regexp = regexp.replace('${0}', r'([\d]+?)')
@@ -19,7 +19,7 @@ from euscan.helpers import dict_to_xml
 mirrors_ = None
 
 
-class ProgressHandler:
+class ProgressHandler(object):
     def __init__(self, progress_bar):
         self.curval = 0
         self.maxval = 0
@@ -74,7 +74,7 @@ def progress_bar():
 
 def clean_colors(string):
     if isinstance(string, str):
-        string = re.sub(r"\033\[[0-9;]+m", "", string)
+        string = re.sub("\033\[[0-9;]+m", "", string)
         string = re.sub(r"\\u001b\[[0-9;]+m", "", string)
         string = re.sub(r"\x1b\[[0-9;]+m", "", string)
     return string
@@ -90,9 +90,9 @@ def transform_url(config, cpv, url):
 
 def to_ebuild_uri(cpv, url):
     cat, pkg, ver, rev = portage.catpkgsplit(cpv)
-    p = f"{pkg}-{ver}"
-    pvr = f"{ver}{f'-{rev}' if rev != 'r0' else ''}"
-    pf = f"{pkg}-{pvr}"
+    p = "%s-%s" % (pkg, ver)
+    pvr = "%s%s" % (ver, "-%s" % rev if rev != "r0" else "")
+    pf = "%s-%s" % (pkg, pvr)
     evars = (
         (p, "P"),
         (pkg, "PN"),
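The `pvr` line is the densest of these formatting conversions: a conditional `-rN` suffix nested inside the interpolation. Both forms agree, as a quick check shows (the version string is illustrative):

ver = "1.2.3"

for rev in ("r0", "r2"):
    legacy = "%s%s" % (ver, "-%s" % rev if rev != "r0" else "")
    modern = f"{ver}{f'-{rev}' if rev != 'r0' else ''}"
    assert legacy == modern

# "1.2.3" when rev == "r0", "1.2.3-r2" otherwise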
@@ -140,8 +140,10 @@ def to_mirror(url):
         for mirror_url in mirrors_[mirror_name]:
             if url.startswith(mirror_url):
                 url_part = url.split(mirror_url)[1]
-                return "mirror://{}{}{}".format(
-                    mirror_name, "" if url_part.startswith("/") else "/", url_part
+                return "mirror://%s%s%s" % (
+                    mirror_name,
+                    "" if url_part.startswith("/") else "/",
+                    url_part,
                 )
     return url
 
@@ -152,17 +154,17 @@ class EOutputMem(EOutput):
     """
 
     def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
+        super(EOutputMem, self).__init__(*args, **kwargs)
         self.out = StringIO()
 
     def getvalue(self):
         return self.out.getvalue()
 
     def _write(self, f, msg):
-        super()._write(self.out, msg)
+        super(EOutputMem, self)._write(self.out, msg)
 
 
-class EuscanOutput:
+class EuscanOutput(object):
     """
     Class that handles output for euscan
     """
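`super()` with no arguments and the explicit `super(EOutputMem, self)` form behave identically in Python 3; the compiler supplies the enclosing class through the `__class__` cell. A stand-in sketch, with `Base`/`MemOutput` as illustrative names rather than euscan classes:

from io import StringIO

class Base:
    def __init__(self):
        self.closed = False

class MemOutput(Base):
    def __init__(self):
        super().__init__()                       # zero-argument form
        self.out = StringIO()

class MemOutputLegacy(Base):
    def __init__(self):
        super(MemOutputLegacy, self).__init__()  # explicit form, same effect
        self.out = StringIO()

assert not MemOutput().closed and not MemOutputLegacy().closed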
@@ -218,7 +220,7 @@ class EuscanOutput:
     def result(self, cp, version, urls, handler, confidence):
         from euscan.version import get_version_type
 
-        cpv = f"{cp}-{version}"
+        cpv = "%s-%s" % (cp, version)
         urls = " ".join(transform_url(self.config, cpv, url) for url in urls.split())
 
         if self.config["format"] in ["json", "dict"]:
@@ -237,13 +239,13 @@ class EuscanOutput:
             print("Upstream Version:", pp.number("%s" % version), end=" ")
             print(pp.path(" %s" % urls))
         else:
-            print(pp.cpv(f"{cp}-{version}") + ":", pp.path(urls))
+            print(pp.cpv("%s-%s" % (cp, version)) + ":", pp.path(urls))
 
     def metadata(self, key, value, show=True):
         if self.config["format"]:
             self.queries[self.current_query]["metadata"][key] = value
         elif show:
-            print(f"{key.capitalize()}: {value}")
+            print("%s: %s" % (key.capitalize(), value))
 
     def __getattr__(self, key):
         if not self.config["quiet"] and self.current_query is not None:
@@ -76,14 +76,14 @@ def reload_gentoolkit():
     if not hasattr(gentoolkit.package, "PORTDB"):
         return
 
-    portdb = portage.db[portage.root]["porttree"].dbapi
+    PORTDB = portage.db[portage.root]["porttree"].dbapi
 
     if hasattr(gentoolkit.dbapi, "PORTDB"):
-        gentoolkit.dbapi.PORTDB = portdb
+        gentoolkit.dbapi.PORTDB = PORTDB
     if hasattr(gentoolkit.package, "PORTDB"):
-        gentoolkit.package.PORTDB = portdb
+        gentoolkit.package.PORTDB = PORTDB
     if hasattr(gentoolkit.query, "PORTDB"):
-        gentoolkit.query.PORTDB = portdb
+        gentoolkit.query.PORTDB = PORTDB
 
 
 def scan_upstream(query, on_progress=None):
@@ -134,7 +134,7 @@ def scan_upstream(query, on_progress=None):
 
     if not CONFIG["quiet"]:
         if not CONFIG["format"]:
-            pp.uprint(f" * {pp.cpv(pkg.cpv)} [{pp.section(pkg.repo_name())}]")
+            pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())))
             pp.uprint()
         else:
            output.metadata("overlay", pp.section(pkg.repo_name()))
@@ -22,7 +22,7 @@ def get_version_type(version):
     if "9999" in version or "99999999" in version:
         return "live"
 
-    for token in re.findall(r"[\._-]([a-zA-Z]+)", version):
+    for token in re.findall("[\._-]([a-zA-Z]+)", version):
         if token in gentoo_types:
             types.append(token)
     if types: