Compare commits

10 Commits

443b5f62fd  Alfred Wingate  2023-11-16 06:25:40 +02:00
    Enable flake8-bugbear linting and fix raised issues
    Signed-off-by: Alfred Wingate <parona@protonmail.com>

49f1fbbad1  Alfred Wingate  2023-11-16 06:18:19 +02:00
    Remove Python2'isms from classes
    Signed-off-by: Alfred Wingate <parona@protonmail.com>

a03b420c75  Alfred Wingate  2023-11-16 06:15:48 +02:00
    Use OSError instead of alias IOError
    https://docs.astral.sh/ruff/rules/os-error-alias/
    Signed-off-by: Alfred Wingate <parona@protonmail.com>

6c0b816e73  Alfred Wingate  2023-11-16 05:56:40 +02:00
    Use f-strings or .format() over percent style
    https://docs.astral.sh/ruff/rules/printf-string-formatting/
    Signed-off-by: Alfred Wingate <parona@protonmail.com>

21fe4eafec  Alfred Wingate  2023-11-16 05:21:49 +02:00
    Enable pyupgrade linting
    Signed-off-by: Alfred Wingate <parona@protonmail.com>

377ba2f727  Alfred Wingate  2023-11-16 05:19:02 +02:00
    N806
    Signed-off-by: Alfred Wingate <parona@protonmail.com>

9f7ba6c9cd  Alfred Wingate  2023-11-16 05:17:14 +02:00
    Address N818
    https://peps.python.org/pep-0008/#exception-names
    Signed-off-by: Alfred Wingate <parona@protonmail.com>

764bcf9ce8  Alfred Wingate  2023-11-16 05:16:25 +02:00
    Enable PEP8 linting in ruff
    Signed-off-by: Alfred Wingate <parona@protonmail.com>

c0be0e0b67  Alfred Wingate  2023-11-16 05:10:40 +02:00
    Fix invalid backslash characters
    * https://docs.python.org/3/whatsnew/3.6.html#deprecated-python-behavior
    Signed-off-by: Alfred Wingate <parona@protonmail.com>

d48699e5fd  Alfred Wingate  2023-11-16 05:02:30 +02:00
    Enable all pycodestyle checks in ruff
    Signed-off-by: Alfred Wingate <parona@protonmail.com>
18 changed files with 86 additions and 102 deletions

View File

@@ -199,7 +199,7 @@ def print_usage(_error=None, help=None):
     # turquoise("`man %s`" % __productname__), file=out)


-class ParseArgsException(Exception):
+class ParseArgsError(Exception):
     """For parseArgs() -> main() communications."""

     def __init__(self, value):

@@ -220,9 +220,9 @@ def parse_args():
         return_code = True
         for o, a in opts:
             if o in ("-h", "--help"):
-                raise ParseArgsException("help")
+                raise ParseArgsError("help")
             elif o in ("-V", "--version"):
-                raise ParseArgsException("version")
+                raise ParseArgsError("version")
             elif o in ("-C", "--nocolor"):
                 CONFIG["nocolor"] = True
                 pp.output.nocolor()

@@ -283,14 +283,14 @@ def parse_args():
     # apply getopts to command line, show partial help on failure
     try:
         opts, args = getopt.getopt(sys.argv[1:], short_opts, long_opts)
-    except getopt.GetoptError:
-        raise ParseArgsException(opts_mode + "-options")
+    except getopt.GetoptError as exc:
+        raise ParseArgsError(opts_mode + "-options") from exc

     # set options accordingly
     option_switch(opts)

     if len(args) < 1:
-        raise ParseArgsException("packages")
+        raise ParseArgsError("packages")

     return args

@@ -306,7 +306,7 @@ def main():
     # parse command line options and actions
     try:
         queries = parse_args()
-    except ParseArgsException as e:
+    except ParseArgsError as e:
         if e.value == "help":
             print_usage(help="all")
             exit_helper(0)

@@ -362,7 +362,7 @@ def main():
             exit_helper(1)

         except GentoolkitException as err:
-            output.eerror("%s: %s" % (query, str(err)))
+            output.eerror(f"{query}: {str(err)}")
             exit_helper(1)

         except Exception as err:

@@ -372,7 +372,7 @@ def main():
                 traceback.print_exc(file=sys.stderr)
                 print("-" * 60)
-            output.eerror("%s: %s" % (query, str(err)))
+            output.eerror(f"{query}: {str(err)}")
             exit_helper(1)

     if not ret and not CONFIG["quiet"]:
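
Note: the rename from ParseArgsException to ParseArgsError follows N818 (exception names should end in "Error"), and the added "from exc" appears to follow flake8-bugbear's B904, which asks that exceptions raised inside an except block chain the original. A minimal sketch of the combined pattern, with hypothetical names:

    class ConfigParseError(Exception):  # N818: suffix "Error", not "Exception"
        pass

    def load(path):
        try:
            with open(path) as fp:
                return fp.read()
        except OSError as exc:
            # B904: "raise ... from exc" keeps the original traceback attached
            raise ConfigParseError(f"cannot read {path}") from exc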

View File

@@ -30,10 +30,10 @@ def guess_indent_values(before):
     def guess_for_tags(tags):
         for tag in tags:
             for i in [0, 2, 4, 6, 8, 12, 16]:
-                if "\n%s<%s" % (" " * i, tag) in before:
+                if f"\n{' ' * i}<{tag}" in before:
                     return i, False
             for i in [0, 1, 2]:
-                if "\n%s<%s" % ("\t" * i, tag) in before:
+                if f"\n{'\t' * i}<{tag}" in before:
                     return i, True
         return -1, False

@@ -119,11 +119,11 @@ def get_deb_url(name):
         content = opened.read()

         for link in BeautifulSoup(content, parseOnlyThese=SoupStrainer("a")):
-            if re.match("[^\s]+\.debian\.tar\.(?:gz|bz2)", link.text):
+            if re.match(r"[^\s]+\.debian\.tar\.(?:gz|bz2)", link.text):
                 deb_url = link["href"]
                 deb_type = "source"
                 break
-            if re.match("[^\s]+\.diff\.gz", link.text):
+            if re.match(r"[^\s]+\.diff\.gz", link.text):
                 deb_url = link["href"]
                 deb_type = "diff"
                 break

@@ -157,7 +157,7 @@ def patch_metadata(package, watch_data, diff=False):
     for watch_line in watch_data.split("\n"):  # there can be multiple lines
         watch_line = " ".join(watch_line.split())  # remove extra spaces and \n

-        version_parse = re.match("version=(\d+?)", watch_line)
+        version_parse = re.match(r"version=(\d+?)", watch_line)
         if version_parse:
             version = version_parse.group(1)
             continue

@@ -180,7 +180,7 @@ def patch_metadata(package, watch_data, diff=False):
             if opt_name in valid:
                 if opt_name == "uversionmangle":
                     opt_name = "versionmangle"
-                cleaned_opts.append('%s="%s"' % (opt_name, opt_value))
+                cleaned_opts.append(f'{opt_name}="{opt_value}"')
         opts = " ".join(cleaned_opts)

         # clean url from useless stuff. Just keep <base> [<filepattern>]

@@ -188,14 +188,9 @@ def patch_metadata(package, watch_data, diff=False):
         url = " ".join([x for x in url_search.groups() if x is not None])

         if opts:
-            watch_tag = '%s<watch version="%s" %s>%s</watch>' % (
-                indent,
-                version,
-                opts,
-                url,
-            )
+            watch_tag = f'{indent}<watch version="{version}" {opts}>{url}</watch>'
         else:
-            watch_tag = '%s<watch version="%s">%s</watch>' % (indent, version, url)
+            watch_tag = f'{indent}<watch version="{version}">{url}</watch>'
         watch_tags.append(watch_tag)

     watch_tags = "\n".join(watch_tags)

@@ -203,11 +198,7 @@ def patch_metadata(package, watch_data, diff=False):
     if "<upstream>" in data:
         data = data.replace("<upstream>", "<upstream>\n%s" % watch_tags, 1)
     else:
-        rep = "%s<upstream>\n%s\n%s</upstream>\n</pkgmetadata>" % (
-            rindent,
-            watch_tags,
-            rindent,
-        )
+        rep = f"{rindent}<upstream>\n{watch_tags}\n{rindent}</upstream>\n</pkgmetadata>"
         data = data.replace("</pkgmetadata>", rep, 1)

     if not diff:

View File

@@ -40,3 +40,6 @@ src_paths = ["bin/euscan", "src/euscan/"]

 [tool.ruff]
 extend-include = ["bin/euscan", "bin/euscan_patch_metadata"]
+
+[tool.ruff.lint]
+extend-select = ["B", "E", "N", "UP", "W"]
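
For reference, these letter codes extend ruff's default rule set with five families: B (flake8-bugbear), E and W (pycodestyle errors and warnings), N (pep8-naming), and UP (pyupgrade). An illustrative snippet, not from this repo, that trips rules from most of these families (W mostly covers whitespace issues that do not render well inline):

    def Scan(urls=[]):       # N802: function name not lowercase; B006: mutable default
        l = 1                # E741: ambiguous variable name
        for u in urls:
            print("%s/" % u) # UP031: percent formatting, prefer an f-string
        return l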

View File

@@ -74,13 +74,13 @@ BRUTEFORCE_BLACKLIST_URLS = [
 ROBOTS_TXT_BLACKLIST_DOMAINS = [
     "(.*)sourceforge(.*)",
     "(.*)github.com",
-    "(.*)qt\.nokia\.com(.*)",
-    "(.*)chromium\.org(.*)",
-    "(.*)nodejs\.org(.*)",
-    "(.*)download\.mono-project\.com(.*)",
-    "(.*)fedorahosted\.org(.*)",
-    "(.*)download\.tuxfamily\.org(.*)",
-    "(.*)festvox\.org(.*)",
+    r"(.*)qt\.nokia\.com(.*)",
+    r"(.*)chromium\.org(.*)",
+    r"(.*)nodejs\.org(.*)",
+    r"(.*)download\.mono-project\.com(.*)",
+    r"(.*)fedorahosted\.org(.*)",
+    r"(.*)download\.tuxfamily\.org(.*)",
+    r"(.*)festvox\.org(.*)",
 ]

 from euscan.out import EuscanOutput  # noqa: E402

View File

@@ -71,7 +71,7 @@ def package_from_ebuild(ebuild):
         return False

     ebuild_split = ebuild.split("/")
-    cpv = "%s/%s" % (ebuild_split[-3], pf)
+    cpv = f"{ebuild_split[-3]}/{pf}"

     if not portage.catpkgsplit(cpv):
         return False

View File

@@ -13,7 +13,7 @@ from euscan import CONFIG, output
 handlers = {"package": [], "url": [], "all": {}}

 # autoimport all modules in this directory and append them to handlers list
-for loader, module_name, is_pkg in pkgutil.walk_packages(__path__):
+for loader, module_name, _is_pkg in pkgutil.walk_packages(__path__):
     module = loader.find_spec(module_name).loader.load_module(module_name)
     if not hasattr(module, "HANDLER_NAME"):
         continue

@@ -157,7 +157,7 @@ def scan_url(pkg, urls, options, on_progress=None):
             else:
                 output.eerror("Can't find a suitable handler!")
         except Exception as e:
-            output.ewarn("Handler failed: [%s] %s" % (e.__class__.__name__, str(e)))
+            output.ewarn(f"Handler failed: [{e.__class__.__name__}] {str(e)}")

         if versions and CONFIG["oneshot"]:
             break
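
The is_pkg to _is_pkg rename appears to address B007 (unused loop control variable): the underscore prefix marks a binding that the loop body never reads. A small sketch of the same idiom:

    import pkgutil

    # B007-friendly: "_finder" and "_is_pkg" are unpacked but intentionally unused
    for _finder, name, _is_pkg in pkgutil.walk_packages(["."]):
        print(name)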

View File

@@ -81,7 +81,7 @@ def mangle_version(up_pv):
     pv = ".".join(groups)

     if rc_part:
-        pv = "%s_rc%s" % (pv, rc_part)
+        pv = f"{pv}_rc{rc_part}"

     return pv

@@ -128,7 +128,7 @@ def scan_pkg(pkg, options):
         fp = helpers.urlopen(url)
     except urllib.error.URLError:
         return []
-    except IOError:
+    except OSError:
         return []

     if not fp:

@@ -157,13 +157,7 @@ def scan_pkg(pkg, options):
         if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
             continue

-        url = "mirror://cpan/authors/id/%s/%s/%s/%s" % (
-            version["cpanid"][0],
-            version["cpanid"][0:1],
-            version["cpanid"],
-            version["archive"],
-        )
+        url = f"mirror://cpan/authors/id/{version['cpanid'][0]}/{version['cpanid'][0:1]}/{version['cpanid']}/{version['archive']}"

         url = mangling.mangle_url(url, options)
         ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
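
Background for the IOError changes across these handlers: since Python 3.3 (PEP 3151), IOError is a plain alias of OSError, which is what ruff's os-error-alias rule rewrites to. A quick check:

    # IOError and OSError are the same class on Python 3
    assert IOError is OSError

    try:
        open("/nonexistent-path")
    except OSError as err:  # catches everything IOError used to catch
        print(err.errno)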

View File

@@ -40,7 +40,7 @@ def scan_pkg(pkg, options):
         if helpers.version_filtered(cp, ver, pv):
             continue
         fp = urllib.request.urlopen(
-            "http://freecode.com/projects/%s/releases/%s" % (package, release_id)
+            f"http://freecode.com/projects/{package}/releases/{release_id}"
         )
         content = str(fp.read())
         download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]

View File

@@ -112,7 +112,7 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
         fp = helpers.urlopen(url)
     except urllib.error.URLError:
         return []
-    except IOError:
+    except OSError:
         return []

     if not fp:

@@ -122,7 +122,7 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
     results = []

-    if re.search(b"<\s*a\s+[^>]*href", data, re.I):
+    if re.search(rb"<\s*a\s+[^>]*href", data, re.I):
         results.extend(scan_html(data, url, pattern))
     elif url.startswith("ftp://"):
         results.extend(scan_ftp(data, url, pattern))

@@ -154,7 +154,7 @@ def scan_url(pkg, url, options):
     if CONFIG["scan-dir"]:
         for bu in SCANDIR_BLACKLIST_URLS:
             if re.match(bu, url):
-                output.einfo("%s is blacklisted by rule %s" % (url, bu))
+                output.einfo(f"{url} is blacklisted by rule {bu}")
                 return []

     resolved_url = helpers.parse_mirror(url)

@@ -167,14 +167,15 @@ def scan_url(pkg, url, options):
     if ver not in resolved_url:
         newver = helpers.version_change_end_sep(ver)
         if newver and newver in resolved_url:
-            output.einfo("Version: using %s instead of %s" % (newver, ver))
+            output.einfo(f"Version: using {newver} instead of {ver}")
             ver = newver

     template = helpers.template_from_url(resolved_url, ver)
     if "${" not in template:
         output.einfo(
-            "Url doesn't seems to depend on version: %s not found in %s"
-            % (ver, resolved_url)
+            "Url doesn't seems to depend on version: {} not found in {}".format(
+                ver, resolved_url
+            )
         )
         return []
     else:

@@ -201,12 +202,12 @@ def brute_force(pkg, url):
     for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
         if re.match(bp, cp):
-            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
+            output.einfo(f"{cp} is blacklisted by rule {bp}")
             return []

     for bp in BRUTEFORCE_BLACKLIST_URLS:
         if re.match(bp, url):
-            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
+            output.einfo(f"{cp} is blacklisted by rule {bp}")
             return []

     output.einfo("Generating version from " + ver)

@@ -227,8 +228,7 @@ def brute_force(pkg, url):
     if "${PV}" not in template:
         output.einfo(
-            "Url doesn't seems to depend on full version: %s not found in %s"
-            % (ver, url)
+            f"Url doesn't seems to depend on full version: {ver} not found in {url}"
         )
         return []
     else:
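
The two message fixes above show the conversion rule of thumb: percent formatting becomes an f-string when the literal fits on one line, and str.format() when the literal has to be split to stay under the length limit. All three forms below are equivalent; the values are made up:

    ver, url = "1.2.3", "https://example.org/pkg"

    old = "Version %s not found in %s" % (ver, url)      # percent style (UP031)
    new = f"Version {ver} not found in {url}"            # f-string
    alt = "Version {} not found in {}".format(ver, url)  # .format(), wraps more easily
    assert old == new == alt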

View File

@@ -44,12 +44,11 @@ def scan_url(pkg, url, options):
     fnre = re.compile("^%s$" % re.escape(filename).replace(re.escape(ver), "(.*?)"))

     output.einfo(
-        "Using github API for: project=%s user=%s filename=%s"
-        % (project, user, filename)
+        f"Using github API for: project={project} user={user} filename={filename}"
     )

     dlreq = urllib.request.urlopen(
-        "https://api.github.com/repos/%s/%s/downloads" % (user, project)
+        f"https://api.github.com/repos/{user}/{project}/downloads"
     )

     dls = json.load(dlreq)

View File

@@ -20,7 +20,7 @@ def can_handle(pkg, url=None):


 def guess_package_and_channel(cp, url):
-    match = re.search("http://(.*)\.php\.net/get/(.*)-(.*).tgz", url)
+    match = re.search(r"http://(.*)\.php\.net/get/(.*)-(.*).tgz", url)

     if match:
         host = match.group(1)

@@ -42,7 +42,7 @@ def scan_pkg(pkg, options):
     package = options["data"]
     channel = options["type"]

-    url = "http://%s.php.net/rest/r/%s/allreleases.xml" % (channel, package.lower())
+    url = f"http://{channel}.php.net/rest/r/{package.lower()}/allreleases.xml"

     output.einfo("Using: " + url)

@@ -50,7 +50,7 @@ def scan_pkg(pkg, options):
         fp = helpers.urlopen(url)
     except urllib.error.URLError:
         return []
-    except IOError:
+    except OSError:
         return []

     if not fp:

@@ -69,7 +69,7 @@ def scan_pkg(pkg, options):
         if helpers.version_filtered(cp, ver, pv):
             continue

-        url = "http://%s.php.net/get/%s-%s.tgz" % (channel, package, up_pv)
+        url = f"http://{channel}.php.net/get/{package}-{up_pv}.tgz"
         url = mangling.mangle_url(url, options)
         ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

View File

@@ -19,7 +19,7 @@ def can_handle(pkg, url=None):


 def guess_package(cp, url):
-    match = re.search("mirror://pypi/\w+/(.*)/.*", url)
+    match = re.search(r"mirror://pypi/\w+/(.*)/.*", url)

     if match:
         return match.group(1)

View File

@@ -42,7 +42,7 @@ def scan_url(pkg, url, options):
     gem = guess_gem(pkg.cpv, url)

     if not gem:
-        output.eerror("Can't guess gem name using %s and %s" % (pkg.cpv, url))
+        output.eerror(f"Can't guess gem name using {pkg.cpv} and {url}")
         return []

     output.einfo("Using RubyGem API: %s" % gem)

@@ -58,7 +58,7 @@ def scan_pkg(pkg, options):
         fp = helpers.urlopen(url)
     except urllib.error.URLError:
         return []
-    except IOError:
+    except OSError:
         return []

     if not fp:

@@ -75,7 +75,7 @@ def scan_pkg(pkg, options):
         pv = mangling.mangle_version(up_pv, options)
         if helpers.version_filtered(cp, ver, pv):
             continue
-        url = "http://rubygems.org/gems/%s-%s.gem" % (gem, up_pv)
+        url = f"http://rubygems.org/gems/{gem}-{up_pv}.gem"
         url = mangling.mangle_url(url, options)
         ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
     return ret

View File

@@ -24,7 +24,7 @@ def can_handle(*args):


 def handle_directory_patterns(base, file_pattern):
-    """
+    r"""
     Directory pattern matching
     e.g.: base: ftp://ftp.nessus.org/pub/nessus/nessus-([\d\.]+)/src/
           file_pattern: nessus-core-([\d\.]+)\.tar\.gz

@@ -45,7 +45,7 @@ def handle_directory_patterns(base, file_pattern):
         fp = helpers.urlopen(basedir)
     except urllib.error.URLError:
         return []
-    except IOError:
+    except OSError:
         return []

     if not fp:

View File

@@ -83,7 +83,7 @@ def version_is_nightly(a, b):

 def version_blacklisted(cp, version):
     rule = None
-    cpv = "%s-%s" % (cp, version)
+    cpv = f"{cp}-{version}"

     # Check that the generated cpv can be used by portage
     if not portage.versions.catpkgsplit(cpv):

@@ -92,10 +92,9 @@ def version_blacklisted(cp, version):
     for bv in BLACKLIST_VERSIONS:
         if dep.match_from_list(bv, [cpv]):
             rule = bv
-            None

     if rule:
-        euscan.output.einfo("%s is blacklisted by rule %s" % (cpv, rule))
+        euscan.output.einfo(f"{cpv} is blacklisted by rule {rule}")
     return rule is not None

@@ -223,7 +222,7 @@ def gen_versions(components, level):
     for i in range(n, n - level, -1):
         increment_version(components, i - 1)
-        for j in range(depth):
+        for _j in range(depth):
             versions.append(list(components))
             increment_version(components, i - 1)

@@ -264,7 +263,7 @@ def urlallowed(url):
     if protocol == "ftp":
         return True

-    baseurl = "%s://%s" % (protocol, domain)
+    baseurl = f"{protocol}://{domain}"
     robotsurl = urllib.parse.urljoin(baseurl, "robots.txt")

     if baseurl in rpcache:

@@ -280,7 +279,7 @@ def urlallowed(url):
         try:
             rp.read()
             rpcache[baseurl] = rp
-        except IOError:
+        except OSError:
             rp = None

     setdefaulttimeout(timeout)

@@ -290,7 +289,7 @@ def urlallowed(url):

 def urlopen(url, timeout=None, verb="GET"):
     if not urlallowed(url):
-        euscan.output.einfo("Url '%s' blocked by robots.txt" % url)
+        euscan.output.einfo(f"Url '{url}' blocked by robots.txt")
         return None

     if not timeout:

@@ -370,7 +369,7 @@ def tryurl(fileurl, template):
     except urllib.error.URLError:
         result = None
-    except IOError:
+    except OSError:
         result = None

     euscan.output.eend(errno.ENOENT if not result else 0)

@@ -383,9 +382,9 @@ def regex_from_template(template):
     regexp = re.escape(template)

     # Unescape specific stuff
-    regexp = regexp.replace("\$\{", "${")
-    regexp = regexp.replace("\}", "}")
-    regexp = regexp.replace("}\.$", "}.$")
+    regexp = regexp.replace(r"\$\{", "${")
+    regexp = regexp.replace(r"\}", "}")
+    regexp = regexp.replace(r"}\.$", "}.$")

     # Replace ${\d+}
     # regexp = regexp.replace('${0}', r'([\d]+?)')
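
Context for the raw-string changes in this file and elsewhere: sequences like \d or \$ are invalid string escapes, deprecated since Python 3.6 and slated to become errors, while an r-prefix makes the backslash literal without changing the resulting regex. A quick demonstration:

    import re

    # the raw string is byte-for-byte what the regex engine needs
    assert "\\$\\{" == r"\$\{"
    print(re.match(r"version=(\d+?)", "version=4").group(1))  # prints: 4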

View File

@@ -19,7 +19,7 @@ from euscan.helpers import dict_to_xml
 mirrors_ = None


-class ProgressHandler(object):
+class ProgressHandler:
     def __init__(self, progress_bar):
         self.curval = 0
         self.maxval = 0

@@ -74,7 +74,7 @@ def progress_bar():

 def clean_colors(string):
     if isinstance(string, str):
-        string = re.sub("\033\[[0-9;]+m", "", string)
+        string = re.sub(r"\033\[[0-9;]+m", "", string)
         string = re.sub(r"\\u001b\[[0-9;]+m", "", string)
         string = re.sub(r"\x1b\[[0-9;]+m", "", string)
         return string

@@ -90,9 +90,9 @@ def transform_url(config, cpv, url):

 def to_ebuild_uri(cpv, url):
     cat, pkg, ver, rev = portage.catpkgsplit(cpv)
-    p = "%s-%s" % (pkg, ver)
-    pvr = "%s%s" % (ver, "-%s" % rev if rev != "r0" else "")
-    pf = "%s-%s" % (pkg, pvr)
+    p = f"{pkg}-{ver}"
+    pvr = f"{ver}{f'-{rev}' if rev != 'r0' else ''}"
+    pf = f"{pkg}-{pvr}"
     evars = (
         (p, "P"),
         (pkg, "PN"),

@@ -140,10 +140,8 @@ def to_mirror(url):
         for mirror_url in mirrors_[mirror_name]:
             if url.startswith(mirror_url):
                 url_part = url.split(mirror_url)[1]
-                return "mirror://%s%s%s" % (
-                    mirror_name,
-                    "" if url_part.startswith("/") else "/",
-                    url_part,
+                return "mirror://{}{}{}".format(
+                    mirror_name, "" if url_part.startswith("/") else "/", url_part
                 )
     return url

@@ -154,17 +152,17 @@ class EOutputMem(EOutput):
     """

     def __init__(self, *args, **kwargs):
-        super(EOutputMem, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.out = StringIO()

     def getvalue(self):
         return self.out.getvalue()

     def _write(self, f, msg):
-        super(EOutputMem, self)._write(self.out, msg)
+        super()._write(self.out, msg)


-class EuscanOutput(object):
+class EuscanOutput:
     """
     Class that handles output for euscan
     """

@@ -220,7 +218,7 @@ class EuscanOutput(object):
     def result(self, cp, version, urls, handler, confidence):
         from euscan.version import get_version_type

-        cpv = "%s-%s" % (cp, version)
+        cpv = f"{cp}-{version}"
         urls = " ".join(transform_url(self.config, cpv, url) for url in urls.split())

         if self.config["format"] in ["json", "dict"]:

@@ -239,13 +237,13 @@ class EuscanOutput(object):
                 print("Upstream Version:", pp.number("%s" % version), end=" ")
                 print(pp.path(" %s" % urls))
             else:
-                print(pp.cpv("%s-%s" % (cp, version)) + ":", pp.path(urls))
+                print(pp.cpv(f"{cp}-{version}") + ":", pp.path(urls))

     def metadata(self, key, value, show=True):
         if self.config["format"]:
             self.queries[self.current_query]["metadata"][key] = value
         elif show:
-            print("%s: %s" % (key.capitalize(), value))
+            print(f"{key.capitalize()}: {value}")

     def __getattr__(self, key):
         if not self.config["quiet"] and self.current_query is not None:
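
The class and super() changes here are pyupgrade's Python-3-only cleanups: UP004 drops the redundant object base class (every Python 3 class is new-style) and UP008 drops super()'s explicit arguments inside a method body. A minimal sketch:

    class Base:
        def greet(self):
            return "hi"

    class Child(Base):  # UP004: no explicit "object" base needed
        def greet(self):
            # UP008: super() here is equivalent to super(Child, self)
            return super().greet() + "!"

    print(Child().greet())  # prints: hi!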

View File

@@ -76,14 +76,14 @@ def reload_gentoolkit():
     if not hasattr(gentoolkit.package, "PORTDB"):
         return

-    PORTDB = portage.db[portage.root]["porttree"].dbapi
+    portdb = portage.db[portage.root]["porttree"].dbapi

     if hasattr(gentoolkit.dbapi, "PORTDB"):
-        gentoolkit.dbapi.PORTDB = PORTDB
+        gentoolkit.dbapi.PORTDB = portdb
     if hasattr(gentoolkit.package, "PORTDB"):
-        gentoolkit.package.PORTDB = PORTDB
+        gentoolkit.package.PORTDB = portdb
     if hasattr(gentoolkit.query, "PORTDB"):
-        gentoolkit.query.PORTDB = PORTDB
+        gentoolkit.query.PORTDB = portdb


 def scan_upstream(query, on_progress=None):

@@ -134,7 +134,7 @@ def scan_upstream(query, on_progress=None):
     if not CONFIG["quiet"]:
         if not CONFIG["format"]:
-            pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())))
+            pp.uprint(f" * {pp.cpv(pkg.cpv)} [{pp.section(pkg.repo_name())}]")
             pp.uprint()
         else:
             output.metadata("overlay", pp.section(pkg.repo_name()))
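
The PORTDB-to-portdb rename matches the bare "N806" commit in the list above: pep8-naming's N806 wants variables inside functions to be lowercase, reserving UPPER_CASE for module-level constants. A sketch with hypothetical names:

    PORTDB_TIMEOUT = 30  # fine: module-level constant

    def reload_db():
        portdb = {"timeout": PORTDB_TIMEOUT}  # fine: lowercase local variable
        # writing "PORTDB = {...}" here would be flagged as N806
        return portdb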

View File

@@ -22,7 +22,7 @@ def get_version_type(version):
     if "9999" in version or "99999999" in version:
         return "live"

-    for token in re.findall("[\._-]([a-zA-Z]+)", version):
+    for token in re.findall(r"[\._-]([a-zA-Z]+)", version):
         if token in gentoo_types:
             types.append(token)
     if types: