Use f-strings or .format() over percent style

https://docs.astral.sh/ruff/rules/printf-string-formatting/

Signed-off-by: Alfred Wingate <parona@protonmail.com>
This commit is contained in:
Alfred Wingate 2023-11-16 05:56:40 +02:00
parent 21fe4eafec
commit 6c0b816e73
No known key found for this signature in database
GPG Key ID: A12750536B5E7010
13 changed files with 40 additions and 58 deletions

View File

@ -362,7 +362,7 @@ def main():
exit_helper(1)
except GentoolkitException as err:
output.eerror("%s: %s" % (query, str(err)))
output.eerror(f"{query}: {str(err)}")
exit_helper(1)
except Exception as err:
@ -372,7 +372,7 @@ def main():
traceback.print_exc(file=sys.stderr)
print("-" * 60)
output.eerror("%s: %s" % (query, str(err)))
output.eerror(f"{query}: {str(err)}")
exit_helper(1)
if not ret and not CONFIG["quiet"]:

View File

@ -30,10 +30,10 @@ def guess_indent_values(before):
def guess_for_tags(tags):
for tag in tags:
for i in [0, 2, 4, 6, 8, 12, 16]:
if "\n%s<%s" % (" " * i, tag) in before:
if f"\n{' ' * i}<{tag}" in before:
return i, False
for i in [0, 1, 2]:
if "\n%s<%s" % ("\t" * i, tag) in before:
if f"\n{'\t' * i}<{tag}" in before:
return i, True
return -1, False
@ -180,7 +180,7 @@ def patch_metadata(package, watch_data, diff=False):
if opt_name in valid:
if opt_name == "uversionmangle":
opt_name = "versionmangle"
cleaned_opts.append('%s="%s"' % (opt_name, opt_value))
cleaned_opts.append(f'{opt_name}="{opt_value}"')
opts = " ".join(cleaned_opts)
# clean url from useless stuff. Just keep <base> [<filepattern>]
@ -188,14 +188,9 @@ def patch_metadata(package, watch_data, diff=False):
url = " ".join([x for x in url_search.groups() if x is not None])
if opts:
watch_tag = '%s<watch version="%s" %s>%s</watch>' % (
indent,
version,
opts,
url,
)
watch_tag = f'{indent}<watch version="{version}" {opts}>{url}</watch>'
else:
watch_tag = '%s<watch version="%s">%s</watch>' % (indent, version, url)
watch_tag = f'{indent}<watch version="{version}">{url}</watch>'
watch_tags.append(watch_tag)
watch_tags = "\n".join(watch_tags)
@ -203,11 +198,7 @@ def patch_metadata(package, watch_data, diff=False):
if "<upstream>" in data:
data = data.replace("<upstream>", "<upstream>\n%s" % watch_tags, 1)
else:
rep = "%s<upstream>\n%s\n%s</upstream>\n</pkgmetadata>" % (
rindent,
watch_tags,
rindent,
)
rep = f"{rindent}<upstream>\n{watch_tags}\n{rindent}</upstream>\n</pkgmetadata>"
data = data.replace("</pkgmetadata>", rep, 1)
if not diff:

View File

@ -71,7 +71,7 @@ def package_from_ebuild(ebuild):
return False
ebuild_split = ebuild.split("/")
cpv = "%s/%s" % (ebuild_split[-3], pf)
cpv = f"{ebuild_split[-3]}/{pf}"
if not portage.catpkgsplit(cpv):
return False

View File

@ -157,7 +157,7 @@ def scan_url(pkg, urls, options, on_progress=None):
else:
output.eerror("Can't find a suitable handler!")
except Exception as e:
output.ewarn("Handler failed: [%s] %s" % (e.__class__.__name__, str(e)))
output.ewarn(f"Handler failed: [{e.__class__.__name__}] {str(e)}")
if versions and CONFIG["oneshot"]:
break

View File

@ -81,7 +81,7 @@ def mangle_version(up_pv):
pv = ".".join(groups)
if rc_part:
pv = "%s_rc%s" % (pv, rc_part)
pv = f"{pv}_rc{rc_part}"
return pv
@ -157,13 +157,7 @@ def scan_pkg(pkg, options):
if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
continue
url = "mirror://cpan/authors/id/%s/%s/%s/%s" % (
version["cpanid"][0],
version["cpanid"][0:1],
version["cpanid"],
version["archive"],
)
url = f"mirror://cpan/authors/id/{version['cpanid'][0]}/{version['cpanid'][0:1]}/{version['cpanid']}/{version['archive']}"
url = mangling.mangle_url(url, options)
ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

View File

@ -40,7 +40,7 @@ def scan_pkg(pkg, options):
if helpers.version_filtered(cp, ver, pv):
continue
fp = urllib.request.urlopen(
"http://freecode.com/projects/%s/releases/%s" % (package, release_id)
f"http://freecode.com/projects/{package}/releases/{release_id}"
)
content = str(fp.read())
download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]

View File

@ -154,7 +154,7 @@ def scan_url(pkg, url, options):
if CONFIG["scan-dir"]:
for bu in SCANDIR_BLACKLIST_URLS:
if re.match(bu, url):
output.einfo("%s is blacklisted by rule %s" % (url, bu))
output.einfo(f"{url} is blacklisted by rule {bu}")
return []
resolved_url = helpers.parse_mirror(url)
@ -167,14 +167,15 @@ def scan_url(pkg, url, options):
if ver not in resolved_url:
newver = helpers.version_change_end_sep(ver)
if newver and newver in resolved_url:
output.einfo("Version: using %s instead of %s" % (newver, ver))
output.einfo(f"Version: using {newver} instead of {ver}")
ver = newver
template = helpers.template_from_url(resolved_url, ver)
if "${" not in template:
output.einfo(
"Url doesn't seems to depend on version: %s not found in %s"
% (ver, resolved_url)
"Url doesn't seems to depend on version: {} not found in {}".format(
ver, resolved_url
)
)
return []
else:
@ -201,12 +202,12 @@ def brute_force(pkg, url):
for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
if re.match(bp, cp):
output.einfo("%s is blacklisted by rule %s" % (cp, bp))
output.einfo(f"{cp} is blacklisted by rule {bp}")
return []
for bp in BRUTEFORCE_BLACKLIST_URLS:
if re.match(bp, url):
output.einfo("%s is blacklisted by rule %s" % (cp, bp))
output.einfo(f"{cp} is blacklisted by rule {bp}")
return []
output.einfo("Generating version from " + ver)
@ -227,8 +228,7 @@ def brute_force(pkg, url):
if "${PV}" not in template:
output.einfo(
"Url doesn't seems to depend on full version: %s not found in %s"
% (ver, url)
f"Url doesn't seems to depend on full version: {ver} not found in {url}"
)
return []
else:

View File

@ -44,12 +44,11 @@ def scan_url(pkg, url, options):
fnre = re.compile("^%s$" % re.escape(filename).replace(re.escape(ver), "(.*?)"))
output.einfo(
"Using github API for: project=%s user=%s filename=%s"
% (project, user, filename)
f"Using github API for: project={project} user={user} filename={filename}"
)
dlreq = urllib.request.urlopen(
"https://api.github.com/repos/%s/%s/downloads" % (user, project)
f"https://api.github.com/repos/{user}/{project}/downloads"
)
dls = json.load(dlreq)

View File

@ -42,7 +42,7 @@ def scan_pkg(pkg, options):
package = options["data"]
channel = options["type"]
url = "http://%s.php.net/rest/r/%s/allreleases.xml" % (channel, package.lower())
url = f"http://{channel}.php.net/rest/r/{package.lower()}/allreleases.xml"
output.einfo("Using: " + url)
@ -69,7 +69,7 @@ def scan_pkg(pkg, options):
if helpers.version_filtered(cp, ver, pv):
continue
url = "http://%s.php.net/get/%s-%s.tgz" % (channel, package, up_pv)
url = f"http://{channel}.php.net/get/{package}-{up_pv}.tgz"
url = mangling.mangle_url(url, options)
ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

View File

@ -42,7 +42,7 @@ def scan_url(pkg, url, options):
gem = guess_gem(pkg.cpv, url)
if not gem:
output.eerror("Can't guess gem name using %s and %s" % (pkg.cpv, url))
output.eerror(f"Can't guess gem name using {pkg.cpv} and {url}")
return []
output.einfo("Using RubyGem API: %s" % gem)
@ -75,7 +75,7 @@ def scan_pkg(pkg, options):
pv = mangling.mangle_version(up_pv, options)
if helpers.version_filtered(cp, ver, pv):
continue
url = "http://rubygems.org/gems/%s-%s.gem" % (gem, up_pv)
url = f"http://rubygems.org/gems/{gem}-{up_pv}.gem"
url = mangling.mangle_url(url, options)
ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
return ret

View File

@ -83,7 +83,7 @@ def version_is_nightly(a, b):
def version_blacklisted(cp, version):
rule = None
cpv = "%s-%s" % (cp, version)
cpv = f"{cp}-{version}"
# Check that the generated cpv can be used by portage
if not portage.versions.catpkgsplit(cpv):
@ -95,7 +95,7 @@ def version_blacklisted(cp, version):
None
if rule:
euscan.output.einfo("%s is blacklisted by rule %s" % (cpv, rule))
euscan.output.einfo(f"{cpv} is blacklisted by rule {rule}")
return rule is not None
@ -264,7 +264,7 @@ def urlallowed(url):
if protocol == "ftp":
return True
baseurl = "%s://%s" % (protocol, domain)
baseurl = f"{protocol}://{domain}"
robotsurl = urllib.parse.urljoin(baseurl, "robots.txt")
if baseurl in rpcache:
@ -290,7 +290,7 @@ def urlallowed(url):
def urlopen(url, timeout=None, verb="GET"):
if not urlallowed(url):
euscan.output.einfo("Url '%s' blocked by robots.txt" % url)
euscan.output.einfo(f"Url '{url}' blocked by robots.txt")
return None
if not timeout:

View File

@ -90,9 +90,9 @@ def transform_url(config, cpv, url):
def to_ebuild_uri(cpv, url):
cat, pkg, ver, rev = portage.catpkgsplit(cpv)
p = "%s-%s" % (pkg, ver)
pvr = "%s%s" % (ver, "-%s" % rev if rev != "r0" else "")
pf = "%s-%s" % (pkg, pvr)
p = f"{pkg}-{ver}"
pvr = f"{ver}{f'-{rev}' if rev != 'r0' else ''}"
pf = f"{pkg}-{pvr}"
evars = (
(p, "P"),
(pkg, "PN"),
@ -140,10 +140,8 @@ def to_mirror(url):
for mirror_url in mirrors_[mirror_name]:
if url.startswith(mirror_url):
url_part = url.split(mirror_url)[1]
return "mirror://%s%s%s" % (
mirror_name,
"" if url_part.startswith("/") else "/",
url_part,
return "mirror://{}{}{}".format(
mirror_name, "" if url_part.startswith("/") else "/", url_part
)
return url
@ -220,7 +218,7 @@ class EuscanOutput(object):
def result(self, cp, version, urls, handler, confidence):
from euscan.version import get_version_type
cpv = "%s-%s" % (cp, version)
cpv = f"{cp}-{version}"
urls = " ".join(transform_url(self.config, cpv, url) for url in urls.split())
if self.config["format"] in ["json", "dict"]:
@ -239,13 +237,13 @@ class EuscanOutput(object):
print("Upstream Version:", pp.number("%s" % version), end=" ")
print(pp.path(" %s" % urls))
else:
print(pp.cpv("%s-%s" % (cp, version)) + ":", pp.path(urls))
print(pp.cpv(f"{cp}-{version}") + ":", pp.path(urls))
def metadata(self, key, value, show=True):
if self.config["format"]:
self.queries[self.current_query]["metadata"][key] = value
elif show:
print("%s: %s" % (key.capitalize(), value))
print(f"{key.capitalize()}: {value}")
def __getattr__(self, key):
if not self.config["quiet"] and self.current_query is not None:

View File

@ -134,7 +134,7 @@ def scan_upstream(query, on_progress=None):
if not CONFIG["quiet"]:
if not CONFIG["format"]:
pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())))
pp.uprint(f" * {pp.cpv(pkg.cpv)} [{pp.section(pkg.repo_name())}]")
pp.uprint()
else:
output.metadata("overlay", pp.section(pkg.repo_name()))