Compare commits
16 commits · 1.0.1_alph...9465c14342
| Author | SHA1 | Date |
| --- | --- | --- |
| | 9465c14342 | |
| | 22915bade5 | |
| | 443b5f62fd | |
| | 49f1fbbad1 | |
| | a03b420c75 | |
| | 6c0b816e73 | |
| | 21fe4eafec | |
| | 377ba2f727 | |
| | 9f7ba6c9cd | |
| | 764bcf9ce8 | |
| | c0be0e0b67 | |
| | d48699e5fd | |
| | eedf3c5939 | |
| | 7ac854dc61 | |
| | 0551629a9a | |
| | 17c4e19bc5 | |
@@ -1,8 +1,8 @@
 include AUTHORS
 include CHANGELOG.rst
 include LICENSE
 include README.rst
 include TODO
-include setup.py
+include pyproject.toml
 recursive-include bin *
 recursive-include man *
-recursive-include pym *.py
+recursive-include src *.py
TODO
@@ -50,53 +50,3 @@ euscan
 - Propose new remote-id: freecode
   e.g.: <remote-id type="freecode">projectname</remote-id>
 
-
-euscanwww
----------
-
-### misc
-
-- Really fix mails: better formating
-- Always keep in db all found versions (when using an API only?). But don't display them if older than current packaged version, except maybe in the "upstream_version" column.
-
-### packages
-
-- Ignore alpha/beta if current is not alpha/beta: per-package setting using metadata.xml ?
-- ~arch / stable support: see "models: keywords"
-- stabilisation candidates: check stabilizations rules, and see how this can be automated
-- set upstream version by hand: will be done after uscan compatiblity
-
-### logs
-
-- Move log models into djeuscanhistory ?
-
-### models
-
-- Repository (added or not, from layman + repositories.xml)
-- Arches and Keyword
-- Metadata, herds, maintainers and homepage are per-version, not per package. Store it in Version instead.
-
-### djportage (LOW-PRIORITY))
-
-- Create standalone application to scan and represent portage trees in models using work done in:
--- euscan
--- p.g.o: https://github.com/bacher09/gentoo-packages
--- gentoostats: https://github.com/gg7/gentoostats_server/blob/master/gentoostats/stats/models.py
-
-The application should be easy to use, and we should be able to launch the scan process in a celery worker using "logging" for logs.
-The application should also be usable for p.g.o and gentoostats later...
-The scan process should be faster than the one using euscan. gentoo-packages have some interesting ideas for that (keeping metadata and ebuild hash, etc..)
-
-### API (LOW-PRIORITY)
-
-- Move to tastypie
-
-### Overlays
-
-/!\ blocked by "djportage" application
-Currently, overlay handling in euscan sucks (it's simply a column nothing more, and they are mostly handled by hand by layman). I'd like to be able to add and remove overlays (overlay name + svn/git/cvs/rsync url). Using a new model and layman API should make this task easy.
-
-/!\ could be done earlier using a simple "overlay" table ... but how to pre-compute everything per-overlay ?
-Once done, a great feature would be to be able to select displayed overlay on euscan (as a global setting: for all pages). This is actually a lot of work, so you should work on that on a separated branch.
-
-Note that this is more complicated that it seems, because a lot of things are precalculated (number of version for this herd, number of outdated versions, etc..), and selecting overlays would break all this. So you'll really need to experiment solutions for this one.
bin/euscan
@@ -199,7 +199,7 @@ def print_usage(_error=None, help=None):
     # turquoise("`man %s`" % __productname__), file=out)


-class ParseArgsException(Exception):
+class ParseArgsError(Exception):
     """For parseArgs() -> main() communications."""

     def __init__(self, value):
@@ -220,9 +220,9 @@ def parse_args():
         return_code = True
         for o, a in opts:
             if o in ("-h", "--help"):
-                raise ParseArgsException("help")
+                raise ParseArgsError("help")
             elif o in ("-V", "--version"):
-                raise ParseArgsException("version")
+                raise ParseArgsError("version")
             elif o in ("-C", "--nocolor"):
                 CONFIG["nocolor"] = True
                 pp.output.nocolor()
@@ -283,14 +283,14 @@ def parse_args():
     # apply getopts to command line, show partial help on failure
     try:
         opts, args = getopt.getopt(sys.argv[1:], short_opts, long_opts)
-    except getopt.GetoptError:
-        raise ParseArgsException(opts_mode + "-options")
+    except getopt.GetoptError as exc:
+        raise ParseArgsError(opts_mode + "-options") from exc

     # set options accordingly
     option_switch(opts)

     if len(args) < 1:
-        raise ParseArgsException("packages")
+        raise ParseArgsError("packages")

     return args

@@ -306,7 +306,7 @@ def main():
     # parse command line options and actions
     try:
         queries = parse_args()
-    except ParseArgsException as e:
+    except ParseArgsError as e:
         if e.value == "help":
             print_usage(help="all")
             exit_helper(0)
@@ -362,7 +362,7 @@ def main():
             exit_helper(1)

         except GentoolkitException as err:
-            output.eerror("%s: %s" % (query, str(err)))
+            output.eerror(f"{query}: {str(err)}")
             exit_helper(1)

         except Exception as err:
@@ -372,7 +372,7 @@ def main():
                 traceback.print_exc(file=sys.stderr)
                 print("-" * 60)

-            output.eerror("%s: %s" % (query, str(err)))
+            output.eerror(f"{query}: {str(err)}")
             exit_helper(1)

     if not ret and not CONFIG["quiet"]:
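The rename from ParseArgsException to ParseArgsError follows the Error-suffix naming convention (pep8-naming N818, which the pyproject.toml hunk below enables via "N"), and the re-raise now chains the original getopt error. A minimal sketch of the same pattern, with a simplified option table standing in for euscan's real one:

```python
import getopt

class ParseArgsError(Exception):
    """For parseArgs() -> main() communications."""
    def __init__(self, value):
        self.value = value

def parse_args(argv):
    try:
        opts, args = getopt.getopt(argv, "hV", ["help", "version"])
    except getopt.GetoptError as exc:
        # "from exc" keeps the getopt error as __cause__ in tracebacks
        raise ParseArgsError("options") from exc
    if not args:
        raise ParseArgsError("packages")
    return args
```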
@@ -30,10 +30,10 @@ def guess_indent_values(before):
     def guess_for_tags(tags):
         for tag in tags:
             for i in [0, 2, 4, 6, 8, 12, 16]:
-                if "\n%s<%s" % (" " * i, tag) in before:
+                if f"\n{' ' * i}<{tag}" in before:
                     return i, False
             for i in [0, 1, 2]:
-                if "\n%s<%s" % ("\t" * i, tag) in before:
+                if f"\n{'\t' * i}<{tag}" in before:
                     return i, True
         return -1, False

@@ -119,11 +119,11 @@ def get_deb_url(name):
         content = opened.read()

         for link in BeautifulSoup(content, parseOnlyThese=SoupStrainer("a")):
-            if re.match("[^\s]+\.debian\.tar\.(?:gz|bz2)", link.text):
+            if re.match(r"[^\s]+\.debian\.tar\.(?:gz|bz2)", link.text):
                 deb_url = link["href"]
                 deb_type = "source"
                 break
-            if re.match("[^\s]+\.diff\.gz", link.text):
+            if re.match(r"[^\s]+\.diff\.gz", link.text):
                 deb_url = link["href"]
                 deb_type = "diff"
                 break
@@ -157,7 +157,7 @@ def patch_metadata(package, watch_data, diff=False):
     for watch_line in watch_data.split("\n"):  # there can be multiple lines
         watch_line = " ".join(watch_line.split())  # remove extra spaces and \n

-        version_parse = re.match("version=(\d+?)", watch_line)
+        version_parse = re.match(r"version=(\d+?)", watch_line)
         if version_parse:
             version = version_parse.group(1)
             continue
@@ -180,7 +180,7 @@ def patch_metadata(package, watch_data, diff=False):
             if opt_name in valid:
                 if opt_name == "uversionmangle":
                     opt_name = "versionmangle"
-                cleaned_opts.append('%s="%s"' % (opt_name, opt_value))
+                cleaned_opts.append(f'{opt_name}="{opt_value}"')
         opts = " ".join(cleaned_opts)

     # clean url from useless stuff. Just keep <base> [<filepattern>]
@@ -188,14 +188,9 @@ def patch_metadata(package, watch_data, diff=False):
         url = " ".join([x for x in url_search.groups() if x is not None])

         if opts:
-            watch_tag = '%s<watch version="%s" %s>%s</watch>' % (
-                indent,
-                version,
-                opts,
-                url,
-            )
+            watch_tag = f'{indent}<watch version="{version}" {opts}>{url}</watch>'
         else:
-            watch_tag = '%s<watch version="%s">%s</watch>' % (indent, version, url)
+            watch_tag = f'{indent}<watch version="{version}">{url}</watch>'
         watch_tags.append(watch_tag)

     watch_tags = "\n".join(watch_tags)
@@ -203,11 +198,7 @@ def patch_metadata(package, watch_data, diff=False):
     if "<upstream>" in data:
         data = data.replace("<upstream>", "<upstream>\n%s" % watch_tags, 1)
     else:
-        rep = "%s<upstream>\n%s\n%s</upstream>\n</pkgmetadata>" % (
-            rindent,
-            watch_tags,
-            rindent,
-        )
+        rep = f"{rindent}<upstream>\n{watch_tags}\n{rindent}</upstream>\n</pkgmetadata>"
         data = data.replace("</pkgmetadata>", rep, 1)

     if not diff:
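One caveat with the guess_for_tags() conversion above: a backslash inside an f-string replacement field (the `'\t' * i` expression) is only valid syntax from Python 3.12 onward (PEP 701); on older interpreters it is a SyntaxError. A sketch of the portable equivalent, hoisting the escape out of the f-string:

```python
tag, i = "watch", 1
tab = "\t" * i                 # computed outside the f-string: valid on any Python 3
needle = f"\n{tab}<{tag}"
assert needle == "\n\t<watch"
```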
@@ -22,6 +22,7 @@ dynamic = ["version"]

 [project.urls]
 homepage = "https://gitlab.com/src_prepare/euscan-ng"
+changelog = "https://gitlab.com/src_prepare/euscan-ng/-/blob/master/CHANGELOG.rst"

 [tool.setuptools]
 script-files = ["bin/euscan"]
@@ -39,3 +40,6 @@ src_paths = ["bin/euscan", "src/euscan/"]

+[tool.ruff]
+extend-include = ["bin/euscan", "bin/euscan_patch_metadata"]
+
 [tool.ruff.lint]
 extend-select = ["B", "E", "N", "UP", "W"]
@@ -74,13 +74,13 @@ BRUTEFORCE_BLACKLIST_URLS = [
 ROBOTS_TXT_BLACKLIST_DOMAINS = [
     "(.*)sourceforge(.*)",
     "(.*)github.com",
-    "(.*)qt\.nokia\.com(.*)",
-    "(.*)chromium\.org(.*)",
-    "(.*)nodejs\.org(.*)",
-    "(.*)download\.mono-project\.com(.*)",
-    "(.*)fedorahosted\.org(.*)",
-    "(.*)download\.tuxfamily\.org(.*)",
-    "(.*)festvox\.org(.*)",
+    r"(.*)qt\.nokia\.com(.*)",
+    r"(.*)chromium\.org(.*)",
+    r"(.*)nodejs\.org(.*)",
+    r"(.*)download\.mono-project\.com(.*)",
+    r"(.*)fedorahosted\.org(.*)",
+    r"(.*)download\.tuxfamily\.org(.*)",
+    r"(.*)festvox\.org(.*)",
 ]

 from euscan.out import EuscanOutput  # noqa: E402
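The r-prefix added to these patterns changes nothing at runtime: "\." in a plain string is an invalid escape that Python currently passes through unchanged, but it triggers a warning at compile time (W605 in the ruff/pycodestyle rule set enabled above) and is slated to become an error. A quick illustration:

```python
import re

# Raw string: the backslash reaches the re module intact, so \. is a literal dot.
pattern = r"(.*)qt\.nokia\.com(.*)"
assert re.match(pattern, "download.qt.nokia.com/archive")
# "(.*)qt\.nokia\.com(.*)" without the r-prefix compiles to the same regex today,
# but the \. escape emits a DeprecationWarning/SyntaxWarning when the file is parsed.
```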
@@ -71,7 +71,7 @@ def package_from_ebuild(ebuild):
         return False

     ebuild_split = ebuild.split("/")
-    cpv = "%s/%s" % (ebuild_split[-3], pf)
+    cpv = f"{ebuild_split[-3]}/{pf}"

     if not portage.catpkgsplit(cpv):
         return False
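The %-to-f-string rewrites that recur throughout this changeset (ruff's UP031/UP032 family) are behavior-preserving. A sketch with illustrative values, not taken from a real tree:

```python
ebuild_split = ["gentoo", "app-portage", "euscan", "euscan-1.0.ebuild"]
pf = "euscan-1.0"
old = "%s/%s" % (ebuild_split[-3], pf)   # old printf-style formatting
new = f"{ebuild_split[-3]}/{pf}"         # new f-string, same result
assert old == new == "app-portage/euscan-1.0"
```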
@@ -13,7 +13,7 @@ from euscan import CONFIG, output
 handlers = {"package": [], "url": [], "all": {}}

 # autoimport all modules in this directory and append them to handlers list
-for loader, module_name, is_pkg in pkgutil.walk_packages(__path__):
+for loader, module_name, _is_pkg in pkgutil.walk_packages(__path__):
     module = loader.find_spec(module_name).loader.load_module(module_name)
     if not hasattr(module, "HANDLER_NAME"):
         continue
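The find_spec(...).loader.load_module(...) chain keeps the old autoimport working, though load_module() itself is deprecated. A standalone sketch of the same handler-discovery idea using importlib.import_module instead; the names here are illustrative, not euscan's actual refactor:

```python
import importlib
import pkgutil

def load_handlers(package):
    """Import every module in `package` and keep those declaring HANDLER_NAME."""
    found = []
    for _finder, name, _is_pkg in pkgutil.walk_packages(
        package.__path__, prefix=package.__name__ + "."
    ):
        module = importlib.import_module(name)
        if hasattr(module, "HANDLER_NAME"):
            found.append(module)
    return found
```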
@@ -157,7 +157,7 @@ def scan_url(pkg, urls, options, on_progress=None):
             else:
                 output.eerror("Can't find a suitable handler!")
         except Exception as e:
-            output.ewarn("Handler failed: [%s] %s" % (e.__class__.__name__, str(e)))
+            output.ewarn(f"Handler failed: [{e.__class__.__name__}] {str(e)}")

         if versions and CONFIG["oneshot"]:
             break
@@ -81,7 +81,7 @@ def mangle_version(up_pv):
         pv = ".".join(groups)

     if rc_part:
-        pv = "%s_rc%s" % (pv, rc_part)
+        pv = f"{pv}_rc{rc_part}"

     return pv

@@ -128,7 +128,7 @@ def scan_pkg(pkg, options):
         fp = helpers.urlopen(url)
     except urllib.error.URLError:
         return []
-    except IOError:
+    except OSError:
         return []

     if not fp:
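The recurring IOError → OSError swap in this changeset (ruff UP024) is purely cosmetic on Python 3: since 3.3, IOError has been a plain alias of OSError, so the except clause catches exactly the same exceptions under the modern name.

```python
assert IOError is OSError  # True on Python 3.3+
try:
    open("/nonexistent/path")
except OSError as err:
    # FileNotFoundError is an OSError subclass, so both spellings catch it
    print(type(err).__name__)
```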
@@ -157,13 +157,7 @@ def scan_pkg(pkg, options):
             if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
                 continue

-            url = "mirror://cpan/authors/id/%s/%s/%s/%s" % (
-                version["cpanid"][0],
-                version["cpanid"][0:1],
-                version["cpanid"],
-                version["archive"],
-            )
-
+            url = f"mirror://cpan/authors/id/{version['cpanid'][0]}/{version['cpanid'][0:1]}/{version['cpanid']}/{version['archive']}"
             url = mangling.mangle_url(url, options)
             ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

@@ -40,7 +40,7 @@ def scan_pkg(pkg, options):
         if helpers.version_filtered(cp, ver, pv):
             continue
         fp = urllib.request.urlopen(
-            "http://freecode.com/projects/%s/releases/%s" % (package, release_id)
+            f"http://freecode.com/projects/{package}/releases/{release_id}"
         )
         content = str(fp.read())
         download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]
@@ -8,14 +8,11 @@ import re
 import urllib.error
 import urllib.parse
 import urllib.request
+import warnings
 from urllib.parse import urljoin, urlparse

-try:
-    from BeautifulSoup import BeautifulSoup
-except ImportError:
-    from bs4 import BeautifulSoup
-
 import portage
+from bs4 import BeautifulSoup, XMLParsedAsHTMLWarning

 from euscan import (
     BRUTEFORCE_BLACKLIST_PACKAGES,
@@ -65,6 +62,7 @@ def confidence_score(found, original, minimum=CONFIDENCE):


 def scan_html(data, url, pattern):
+    warnings.filterwarnings("ignore", category=XMLParsedAsHTMLWarning)
     soup = BeautifulSoup(data, features="lxml")
     results = []

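scan_html() is fed arbitrary upstream pages, some of which are really XML; parsing those with the lxml HTML parser makes bs4 emit XMLParsedAsHTMLWarning, which the added line silences. A self-contained sketch of the same setup:

```python
import warnings
from bs4 import BeautifulSoup, XMLParsedAsHTMLWarning

warnings.filterwarnings("ignore", category=XMLParsedAsHTMLWarning)
data = "<?xml version='1.0'?><links><a href='pkg-1.0.tar.gz'/></links>"
soup = BeautifulSoup(data, features="lxml")  # needs the lxml parser installed
print([a.get("href") for a in soup.find_all("a")])  # ['pkg-1.0.tar.gz']
```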
@@ -114,7 +112,7 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
         fp = helpers.urlopen(url)
     except urllib.error.URLError:
         return []
-    except IOError:
+    except OSError:
         return []

     if not fp:
@@ -124,7 +122,7 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):

     results = []

-    if re.search(b"<\s*a\s+[^>]*href", data, re.I):
+    if re.search(rb"<\s*a\s+[^>]*href", data, re.I):
         results.extend(scan_html(data, url, pattern))
     elif url.startswith("ftp://"):
         results.extend(scan_ftp(data, url, pattern))
@@ -156,7 +154,7 @@ def scan_url(pkg, url, options):
     if CONFIG["scan-dir"]:
         for bu in SCANDIR_BLACKLIST_URLS:
             if re.match(bu, url):
-                output.einfo("%s is blacklisted by rule %s" % (url, bu))
+                output.einfo(f"{url} is blacklisted by rule {bu}")
                 return []

     resolved_url = helpers.parse_mirror(url)
@@ -169,14 +167,15 @@ def scan_url(pkg, url, options):
         if ver not in resolved_url:
             newver = helpers.version_change_end_sep(ver)
             if newver and newver in resolved_url:
-                output.einfo("Version: using %s instead of %s" % (newver, ver))
+                output.einfo(f"Version: using {newver} instead of {ver}")
                 ver = newver

     template = helpers.template_from_url(resolved_url, ver)
     if "${" not in template:
         output.einfo(
-            "Url doesn't seems to depend on version: %s not found in %s"
-            % (ver, resolved_url)
+            "Url doesn't seems to depend on version: {} not found in {}".format(
+                ver, resolved_url
+            )
         )
         return []
     else:
@@ -203,12 +202,12 @@ def brute_force(pkg, url):

     for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
         if re.match(bp, cp):
-            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
+            output.einfo(f"{cp} is blacklisted by rule {bp}")
             return []

     for bp in BRUTEFORCE_BLACKLIST_URLS:
         if re.match(bp, url):
-            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
+            output.einfo(f"{cp} is blacklisted by rule {bp}")
             return []

     output.einfo("Generating version from " + ver)
@@ -229,8 +228,7 @@ def brute_force(pkg, url):

     if "${PV}" not in template:
         output.einfo(
-            "Url doesn't seems to depend on full version: %s not found in %s"
-            % (ver, url)
+            f"Url doesn't seems to depend on full version: {ver} not found in {url}"
         )
         return []
     else:
@@ -44,12 +44,11 @@ def scan_url(pkg, url, options):
     fnre = re.compile("^%s$" % re.escape(filename).replace(re.escape(ver), "(.*?)"))

     output.einfo(
-        "Using github API for: project=%s user=%s filename=%s"
-        % (project, user, filename)
+        f"Using github API for: project={project} user={user} filename={filename}"
     )

     dlreq = urllib.request.urlopen(
-        "https://api.github.com/repos/%s/%s/downloads" % (user, project)
+        f"https://api.github.com/repos/{user}/{project}/downloads"
     )
     dls = json.load(dlreq)

@@ -25,17 +25,17 @@ def clean_results(results):


 def scan_url(pkg, url, options):
-    results = generic.scan(pkg.cpv, url)
+    results = generic.scan_url(pkg, url, options)

-    if generic.startswith("mirror://kde/unstable/"):
-        url = generic.replace("mirror://kde/unstable/", "mirror://kde/stable/")
-        results += generic.scan(pkg.cpv, url)
+    if url.startswith("mirror://kde/unstable/"):
+        url = url.replace("mirror://kde/unstable/", "mirror://kde/stable/")
+        results += generic.scan_url(pkg, url, options)

     if not results:  # if nothing was found go brute forcing
         results = generic.brute_force(pkg.cpv, url)

-        if generic.startswith("mirror://kde/unstable/"):
-            url = generic.replace("mirror://kde/unstable/", "mirror://kde/stable/")
+        if url.startswith("mirror://kde/unstable/"):
+            url = url.replace("mirror://kde/unstable/", "mirror://kde/stable/")
             results += generic.brute_force(pkg.cpv, url)

     return clean_results(results)
@ -20,7 +20,7 @@ def can_handle(pkg, url=None):
|
||||
|
||||
|
||||
def guess_package_and_channel(cp, url):
|
||||
match = re.search("http://(.*)\.php\.net/get/(.*)-(.*).tgz", url)
|
||||
match = re.search(r"http://(.*)\.php\.net/get/(.*)-(.*).tgz", url)
|
||||
|
||||
if match:
|
||||
host = match.group(1)
|
||||
@ -42,7 +42,7 @@ def scan_pkg(pkg, options):
|
||||
package = options["data"]
|
||||
channel = options["type"]
|
||||
|
||||
url = "http://%s.php.net/rest/r/%s/allreleases.xml" % (channel, package.lower())
|
||||
url = f"http://{channel}.php.net/rest/r/{package.lower()}/allreleases.xml"
|
||||
|
||||
output.einfo("Using: " + url)
|
||||
|
||||
@ -50,7 +50,7 @@ def scan_pkg(pkg, options):
|
||||
fp = helpers.urlopen(url)
|
||||
except urllib.error.URLError:
|
||||
return []
|
||||
except IOError:
|
||||
except OSError:
|
||||
return []
|
||||
|
||||
if not fp:
|
||||
@ -69,7 +69,7 @@ def scan_pkg(pkg, options):
|
||||
if helpers.version_filtered(cp, ver, pv):
|
||||
continue
|
||||
|
||||
url = "http://%s.php.net/get/%s-%s.tgz" % (channel, package, up_pv)
|
||||
url = f"http://{channel}.php.net/get/{package}-{up_pv}.tgz"
|
||||
url = mangling.mangle_url(url, options)
|
||||
|
||||
ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
|
||||
|
@@ -19,7 +19,7 @@ def can_handle(pkg, url=None):


 def guess_package(cp, url):
-    match = re.search("mirror://pypi/\w+/(.*)/.*", url)
+    match = re.search(r"mirror://pypi/\w+/(.*)/.*", url)
     if match:
         return match.group(1)

@@ -42,7 +42,7 @@ def scan_url(pkg, url, options):
     gem = guess_gem(pkg.cpv, url)

     if not gem:
-        output.eerror("Can't guess gem name using %s and %s" % (pkg.cpv, url))
+        output.eerror(f"Can't guess gem name using {pkg.cpv} and {url}")
         return []

     output.einfo("Using RubyGem API: %s" % gem)
@@ -58,7 +58,7 @@ def scan_pkg(pkg, options):
         fp = helpers.urlopen(url)
     except urllib.error.URLError:
         return []
-    except IOError:
+    except OSError:
         return []

     if not fp:
@@ -75,7 +75,7 @@ def scan_pkg(pkg, options):
         pv = mangling.mangle_version(up_pv, options)
         if helpers.version_filtered(cp, ver, pv):
             continue
-        url = "http://rubygems.org/gems/%s-%s.gem" % (gem, up_pv)
+        url = f"http://rubygems.org/gems/{gem}-{up_pv}.gem"
         url = mangling.mangle_url(url, options)
         ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
     return ret
@@ -24,7 +24,7 @@ def can_handle(*args):


 def handle_directory_patterns(base, file_pattern):
-    """
+    r"""
     Directory pattern matching
     e.g.: base: ftp://ftp.nessus.org/pub/nessus/nessus-([\d\.]+)/src/
           file_pattern: nessus-core-([\d\.]+)\.tar\.gz
@@ -45,7 +45,7 @@ def handle_directory_patterns(base, file_pattern):
         fp = helpers.urlopen(basedir)
     except urllib.error.URLError:
         return []
-    except IOError:
+    except OSError:
         return []

     if not fp:
@@ -83,7 +83,7 @@ def version_is_nightly(a, b):

 def version_blacklisted(cp, version):
     rule = None
-    cpv = "%s-%s" % (cp, version)
+    cpv = f"{cp}-{version}"

     # Check that the generated cpv can be used by portage
     if not portage.versions.catpkgsplit(cpv):
@@ -92,10 +92,9 @@ def version_blacklisted(cp, version):
     for bv in BLACKLIST_VERSIONS:
         if dep.match_from_list(bv, [cpv]):
             rule = bv
-            None

     if rule:
-        euscan.output.einfo("%s is blacklisted by rule %s" % (cpv, rule))
+        euscan.output.einfo(f"{cpv} is blacklisted by rule {rule}")
     return rule is not None


@@ -223,7 +222,7 @@ def gen_versions(components, level):

     for i in range(n, n - level, -1):
         increment_version(components, i - 1)
-        for j in range(depth):
+        for _j in range(depth):
             versions.append(list(components))
             increment_version(components, i - 1)

@@ -264,7 +263,7 @@ def urlallowed(url):
     if protocol == "ftp":
         return True

-    baseurl = "%s://%s" % (protocol, domain)
+    baseurl = f"{protocol}://{domain}"
     robotsurl = urllib.parse.urljoin(baseurl, "robots.txt")

     if baseurl in rpcache:
@@ -280,7 +279,7 @@ def urlallowed(url):
     try:
         rp.read()
         rpcache[baseurl] = rp
-    except IOError:
+    except OSError:
         rp = None

     setdefaulttimeout(timeout)
@@ -290,7 +289,7 @@ def urlallowed(url):

 def urlopen(url, timeout=None, verb="GET"):
     if not urlallowed(url):
-        euscan.output.einfo("Url '%s' blocked by robots.txt" % url)
+        euscan.output.einfo(f"Url '{url}' blocked by robots.txt")
         return None

     if not timeout:
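For context on the urlallowed() hunks above: the function keeps one RobotFileParser per scheme://host in rpcache so each robots.txt is fetched at most once per run. A minimal standalone sketch of that check, simplified and without euscan's timeout handling:

```python
import urllib.parse
import urllib.robotparser

rpcache = {}  # one parser per scheme://host, mirroring euscan's cache

def urlallowed(url, user_agent="euscan"):
    parts = urllib.parse.urlparse(url)
    baseurl = f"{parts.scheme}://{parts.netloc}"
    if baseurl not in rpcache:
        rp = urllib.robotparser.RobotFileParser()
        rp.set_url(urllib.parse.urljoin(baseurl, "robots.txt"))
        try:
            rp.read()
        except OSError:  # robots.txt unreachable: remember that and allow
            rp = None
        rpcache[baseurl] = rp
    rp = rpcache[baseurl]
    return rp.can_fetch(user_agent, url) if rp else True
```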
@@ -370,7 +369,7 @@ def tryurl(fileurl, template):

     except urllib.error.URLError:
         result = None
-    except IOError:
+    except OSError:
         result = None

     euscan.output.eend(errno.ENOENT if not result else 0)
@@ -383,9 +382,9 @@ def regex_from_template(template):
     regexp = re.escape(template)

     # Unescape specific stuff
-    regexp = regexp.replace("\$\{", "${")
-    regexp = regexp.replace("\}", "}")
-    regexp = regexp.replace("}\.$", "}.$")
+    regexp = regexp.replace(r"\$\{", "${")
+    regexp = regexp.replace(r"\}", "}")
+    regexp = regexp.replace(r"}\.$", "}.$")

     # Replace ${\d+}
     # regexp = regexp.replace('${0}', r'([\d]+?)')
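The escape/unescape dance in regex_from_template() turns a literal URL template into a regex while keeping the ${N} placeholders replaceable by capture groups. A sketch of the idea with a hypothetical template (the capture-group substitution here is illustrative, not euscan's exact rule):

```python
import re

template = "https://example.com/pkg-${0}.tar.gz"  # hypothetical template
regexp = re.escape(template)                       # escape the whole URL...
regexp = regexp.replace(r"\$\{", "${").replace(r"\}", "}")  # ...then restore ${N}
regexp = regexp.replace("${0}", r"([\d.]+?)") + "$"
assert re.match(regexp, "https://example.com/pkg-1.2.3.tar.gz").group(1) == "1.2.3"
```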
@@ -19,7 +19,7 @@ from euscan.helpers import dict_to_xml
 mirrors_ = None


-class ProgressHandler(object):
+class ProgressHandler:
     def __init__(self, progress_bar):
         self.curval = 0
         self.maxval = 0
@@ -74,7 +74,7 @@ def progress_bar():

 def clean_colors(string):
     if isinstance(string, str):
-        string = re.sub("\033\[[0-9;]+m", "", string)
+        string = re.sub(r"\033\[[0-9;]+m", "", string)
         string = re.sub(r"\\u001b\[[0-9;]+m", "", string)
         string = re.sub(r"\x1b\[[0-9;]+m", "", string)
     return string
@@ -90,9 +90,9 @@ def transform_url(config, cpv, url):

 def to_ebuild_uri(cpv, url):
     cat, pkg, ver, rev = portage.catpkgsplit(cpv)
-    p = "%s-%s" % (pkg, ver)
-    pvr = "%s%s" % (ver, "-%s" % rev if rev != "r0" else "")
-    pf = "%s-%s" % (pkg, pvr)
+    p = f"{pkg}-{ver}"
+    pvr = f"{ver}{f'-{rev}' if rev != 'r0' else ''}"
+    pf = f"{pkg}-{pvr}"
     evars = (
         (p, "P"),
         (pkg, "PN"),
@@ -140,10 +140,8 @@ def to_mirror(url):
     for mirror_url in mirrors_[mirror_name]:
         if url.startswith(mirror_url):
             url_part = url.split(mirror_url)[1]
-            return "mirror://%s%s%s" % (
-                mirror_name,
-                "" if url_part.startswith("/") else "/",
-                url_part,
+            return "mirror://{}{}{}".format(
+                mirror_name, "" if url_part.startswith("/") else "/", url_part
             )
     return url

@@ -154,17 +152,17 @@ class EOutputMem(EOutput):
     """

     def __init__(self, *args, **kwargs):
-        super(EOutputMem, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.out = StringIO()

     def getvalue(self):
         return self.out.getvalue()

     def _write(self, f, msg):
-        super(EOutputMem, self)._write(self.out, msg)
+        super()._write(self.out, msg)


-class EuscanOutput(object):
+class EuscanOutput:
     """
     Class that handles output for euscan
     """
@@ -220,7 +218,7 @@ class EuscanOutput(object):
     def result(self, cp, version, urls, handler, confidence):
         from euscan.version import get_version_type

-        cpv = "%s-%s" % (cp, version)
+        cpv = f"{cp}-{version}"
         urls = " ".join(transform_url(self.config, cpv, url) for url in urls.split())

         if self.config["format"] in ["json", "dict"]:
@@ -239,13 +237,13 @@ class EuscanOutput(object):
             print("Upstream Version:", pp.number("%s" % version), end=" ")
             print(pp.path(" %s" % urls))
         else:
-            print(pp.cpv("%s-%s" % (cp, version)) + ":", pp.path(urls))
+            print(pp.cpv(f"{cp}-{version}") + ":", pp.path(urls))

     def metadata(self, key, value, show=True):
         if self.config["format"]:
             self.queries[self.current_query]["metadata"][key] = value
         elif show:
-            print("%s: %s" % (key.capitalize(), value))
+            print(f"{key.capitalize()}: {value}")

     def __getattr__(self, key):
         if not self.config["quiet"] and self.current_query is not None:
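The super(EOutputMem, self) → super() rewrites above (ruff UP008) rely on Python 3's zero-argument form, which resolves the class and instance from the enclosing method; a self-contained sketch:

```python
from io import StringIO

class Base:
    def _write(self, out, msg):
        out.write(msg)

class Buffered(Base):
    def __init__(self):
        super().__init__()             # same as super(Buffered, self).__init__()
        self.out = StringIO()

    def _write(self, f, msg):
        super()._write(self.out, msg)  # redirect everything into the buffer

b = Buffered()
b._write(None, "hello")
assert b.out.getvalue() == "hello"
```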
@@ -76,14 +76,14 @@ def reload_gentoolkit():
     if not hasattr(gentoolkit.package, "PORTDB"):
         return

-    PORTDB = portage.db[portage.root]["porttree"].dbapi
+    portdb = portage.db[portage.root]["porttree"].dbapi

     if hasattr(gentoolkit.dbapi, "PORTDB"):
-        gentoolkit.dbapi.PORTDB = PORTDB
+        gentoolkit.dbapi.PORTDB = portdb
     if hasattr(gentoolkit.package, "PORTDB"):
-        gentoolkit.package.PORTDB = PORTDB
+        gentoolkit.package.PORTDB = portdb
     if hasattr(gentoolkit.query, "PORTDB"):
-        gentoolkit.query.PORTDB = PORTDB
+        gentoolkit.query.PORTDB = portdb


 def scan_upstream(query, on_progress=None):
@@ -134,7 +134,7 @@ def scan_upstream(query, on_progress=None):

     if not CONFIG["quiet"]:
         if not CONFIG["format"]:
-            pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())))
+            pp.uprint(f" * {pp.cpv(pkg.cpv)} [{pp.section(pkg.repo_name())}]")
             pp.uprint()
         else:
             output.metadata("overlay", pp.section(pkg.repo_name()))
@@ -22,7 +22,7 @@ def get_version_type(version):
     if "9999" in version or "99999999" in version:
         return "live"

-    for token in re.findall("[\._-]([a-zA-Z]+)", version):
+    for token in re.findall(r"[\._-]([a-zA-Z]+)", version):
         if token in gentoo_types:
             types.append(token)
     if types: