Run black on project
Signed-off-by: Alfred Wingate <parona@protonmail.com>
parent a91775919c · commit d860708ec9
bin/euscan · 280 changed lines
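Every hunk below is a mechanical pass by the black code formatter: single quotes normalized to double quotes, backslash continuations and hand-wrapped calls rewritten into parenthesized multi-line form with trailing commas, and long lines wrapped to black's default line length (88 columns). No behavior changes. As a minimal sketch of the transformation, assuming black is installed locally (the commit itself would have been produced by running black's command line over the tree, e.g. black .), the same normalization can be reproduced through black's Python API:

import black

# One line taken from the first hunk below; black rewrites the quoting
# and leaves the logic untouched.
src = "isatty = os.environ.get('TERM') != 'dumb' and sys.stdout.isatty()\n"
print(black.format_str(src, mode=black.Mode()), end="")
# -> isatty = os.environ.get("TERM") != "dumb" and sys.stdout.isatty()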
@@ -35,8 +35,8 @@ from euscan._version import __version__


 # Globals
-isatty = os.environ.get('TERM') != 'dumb' and sys.stdout.isatty()
-isatty_stderr = os.environ.get('TERM') != 'dumb' and sys.stderr.isatty()
+isatty = os.environ.get("TERM") != "dumb" and sys.stdout.isatty()
+isatty_stderr = os.environ.get("TERM") != "dumb" and sys.stderr.isatty()


 def exit_helper(status):
@@ -62,8 +62,7 @@ def setup_signals():

 def print_version():
     """Output the version info."""
-    print("%s (%s) - %s" \
-          % (__productname__, __version__, __description__))
+    print("%s (%s) - %s" % (__productname__, __version__, __description__))
     print()
     print("Author: %s <%s>" % (__author__, __email__))
     print("Copyright 2011 Gentoo Foundation")
@@ -78,78 +77,134 @@ def print_usage(_error=None, help=None):
     if _error:
         out = sys.stderr

-    if not _error in ('global-options', 'packages',):
+    if not _error in (
+        "global-options",
+        "packages",
+    ):
         _error = None

     if not _error and not help:
-        help = 'all'
+        help = "all"

-    if _error in ('global-options',):
+    if _error in ("global-options",):
         output.eerror("Wrong option on command line.\n")

-    if _error in ('packages',):
+    if _error in ("packages",):
         output.eerror("You need to specify exactly one package.\n")

     print(white("Usage:"), file=out)
-    if _error in ('global-options', 'packages',) or help == 'all':
-        print(" " + turquoise(__productname__),
-              yellow("[options]"),
-              green("<package> [<package> [...]]"), file=out)
-    if _error in ('global-options',) or help == 'all':
-        print(" " + turquoise(__productname__),
-              yellow("[--help, --version]"), file=out)
+    if (
+        _error
+        in (
+            "global-options",
+            "packages",
+        )
+        or help == "all"
+    ):
+        print(
+            " " + turquoise(__productname__),
+            yellow("[options]"),
+            green("<package> [<package> [...]]"),
+            file=out,
+        )
+    if _error in ("global-options",) or help == "all":
+        print(" " + turquoise(__productname__), yellow("[--help, --version]"), file=out)

     print(file=out)
-    if _error in ('global-options',) or help:
+    if _error in ("global-options",) or help:
         print("Available ", yellow("options") + ":", file=out)
-        print(yellow(" -C, --nocolor") +
-              " - turn off colors on output", file=out)
-        print(yellow(" -q, --quiet") +
-              " - be as quiet as possible", file=out)
-        print(yellow(" -h, --help") +
-              " - display the help screen", file=out)
-        print(yellow(" -V, --version") +
-              " - display version info", file=out)
+        print(
+            yellow(" -C, --nocolor")
+            + " - turn off colors on output",
+            file=out,
+        )
+        print(
+            yellow(" -q, --quiet")
+            + " - be as quiet as possible",
+            file=out,
+        )
+        print(
+            yellow(" -h, --help")
+            + " - display the help screen",
+            file=out,
+        )
+        print(
+            yellow(" -V, --version") + " - display version info",
+            file=out,
+        )
         print(file=out)
-        print(yellow(" -1, --oneshot") +
-              " - stop as soon as a new version is found",
-              file=out)
-        print(yellow(" -b, --brute-force=<level>") +
-              " - define the brute force " + yellow("<level>") +
-              " (default: 2)\n" +
-              " " * 38 + "bigger levels will generate more versions numbers\n" +
-              " " * 38 + "0 means disabled", file=out)
-        print(yellow(" -f, --format=<format>") +
-              " - define the output " + yellow("<format>") +
-              " (available: json, xml)", file=out)
-        print(yellow(" -p, --progress") +
-              " - display a progress bar", file=out)
-        print(yellow(" -i, --ignore-pre-release") +
-              " " * 11 + "- Ignore non-stable versions", file=out)
-        print(yellow(" -I, --ignore-pre-release-if-stable") +
-              " - Ignore non-stable versions only if current\n" +
-              " " * 38 + "version is stable", file=out)
-        print(yellow(" --mirror") +
-              " - use mirror:// URIs", file=out)
-        print(yellow(" --ebuild-uri") +
-              " - use ebuild variables in URIs", file=out)
-        print(yellow(" --no-handlers") +
-              " - exclude handlers (comma-separated list)",
-              file=out)
+        print(
+            yellow(" -1, --oneshot")
+            + " - stop as soon as a new version is found",
+            file=out,
+        )
+        print(
+            yellow(" -b, --brute-force=<level>")
+            + " - define the brute force "
+            + yellow("<level>")
+            + " (default: 2)\n"
+            + " " * 38
+            + "bigger levels will generate more versions numbers\n"
+            + " " * 38
+            + "0 means disabled",
+            file=out,
+        )
+        print(
+            yellow(" -f, --format=<format>")
+            + " - define the output "
+            + yellow("<format>")
+            + " (available: json, xml)",
+            file=out,
+        )
+        print(
+            yellow(" -p, --progress") + " - display a progress bar",
+            file=out,
+        )
+        print(
+            yellow(" -i, --ignore-pre-release")
+            + " " * 11
+            + "- Ignore non-stable versions",
+            file=out,
+        )
+        print(
+            yellow(" -I, --ignore-pre-release-if-stable")
+            + " - Ignore non-stable versions only if current\n"
+            + " " * 38
+            + "version is stable",
+            file=out,
+        )
+        print(
+            yellow(" --mirror") + " - use mirror:// URIs",
+            file=out,
+        )
+        print(
+            yellow(" --ebuild-uri")
+            + " - use ebuild variables in URIs",
+            file=out,
+        )
+        print(
+            yellow(" --no-handlers")
+            + " - exclude handlers (comma-separated list)",
+            file=out,
+        )
         print(file=out)

-    if _error in ('packages',) or help:
-        print(green(" package") +
-              " " * 28 + "- the packages (or ebuilds) you want to scan",
-              file=out)
+    if _error in ("packages",) or help:
+        print(
+            green(" package")
+            + " " * 28
+            + "- the packages (or ebuilds) you want to scan",
+            file=out,
+        )
         print(file=out)

-    #print( "More detailed instruction can be found in",
-    #turquoise("`man %s`" % __productname__), file=out)
+    # print( "More detailed instruction can be found in",
+    # turquoise("`man %s`" % __productname__), file=out)


 class ParseArgsException(Exception):
     """For parseArgs() -> main() communications."""

     def __init__(self, value):
         self.value = value

@@ -168,92 +223,102 @@ def parse_args():
         return_code = True
         for o, a in opts:
             if o in ("-h", "--help"):
-                raise ParseArgsException('help')
+                raise ParseArgsException("help")
             elif o in ("-V", "--version"):
-                raise ParseArgsException('version')
+                raise ParseArgsException("version")
             elif o in ("-C", "--nocolor"):
-                CONFIG['nocolor'] = True
+                CONFIG["nocolor"] = True
                 pp.output.nocolor()
             elif o in ("-q", "--quiet"):
-                CONFIG['quiet'] = True
-                CONFIG['verbose'] = 0
+                CONFIG["quiet"] = True
+                CONFIG["verbose"] = 0
             elif o in ("-1", "--oneshot"):
-                CONFIG['oneshot'] = True
+                CONFIG["oneshot"] = True
             elif o in ("-b", "--brute-force"):
-                CONFIG['brute-force'] = int(a)
-            elif o in ("-v", "--verbose") and not CONFIG['quiet']:
-                CONFIG['verbose'] += 1
+                CONFIG["brute-force"] = int(a)
+            elif o in ("-v", "--verbose") and not CONFIG["quiet"]:
+                CONFIG["verbose"] += 1
             elif o in ("-f", "--format"):
-                CONFIG['format'] = a
-                CONFIG['nocolor'] = True
+                CONFIG["format"] = a
+                CONFIG["nocolor"] = True
                 pp.output.nocolor()
             elif o in ("-p", "--progress"):
-                CONFIG['progress'] = isatty_stderr
+                CONFIG["progress"] = isatty_stderr
             elif o in ("--mirror"):
-                CONFIG['mirror'] = True
+                CONFIG["mirror"] = True
             elif o in ("-i", "--ignore-pre-release"):
-                CONFIG['ignore-pre-release'] = True
+                CONFIG["ignore-pre-release"] = True
             elif o in ("-I", "--ignore-pre-release-if-stable"):
-                CONFIG['ignore-pre-release-if-stable'] = True
+                CONFIG["ignore-pre-release-if-stable"] = True
             elif o in ("--ebuild-uri"):
-                CONFIG['ebuild-uri'] = True
+                CONFIG["ebuild-uri"] = True
             elif o in ("--no-handlers"):
-                CONFIG['handlers-exclude'] = a.split(",")
+                CONFIG["handlers-exclude"] = a.split(",")
             else:
                 return_code = False

         return return_code

     # here are the different allowed command line options (getopt args)
-    getopt_options = {'short': {}, 'long': {}}
-    getopt_options['short']['global'] = "hVCqv1b:f:piI"
-    getopt_options['long']['global'] = [
-        "help", "version", "nocolor", "quiet", "verbose", "oneshot",
-        "brute-force=", "format=", "progress", "mirror", "ignore-pre-release",
-        "ignore-pre-release-if-stable", "ebuild-uri", "no-handlers="
+    getopt_options = {"short": {}, "long": {}}
+    getopt_options["short"]["global"] = "hVCqv1b:f:piI"
+    getopt_options["long"]["global"] = [
+        "help",
+        "version",
+        "nocolor",
+        "quiet",
+        "verbose",
+        "oneshot",
+        "brute-force=",
+        "format=",
+        "progress",
+        "mirror",
+        "ignore-pre-release",
+        "ignore-pre-release-if-stable",
+        "ebuild-uri",
+        "no-handlers=",
     ]

-    short_opts = getopt_options['short']['global']
-    long_opts = getopt_options['long']['global']
-    opts_mode = 'global'
+    short_opts = getopt_options["short"]["global"]
+    long_opts = getopt_options["long"]["global"]
+    opts_mode = "global"

     # apply getopts to command line, show partial help on failure
     try:
         opts, args = getopt.getopt(sys.argv[1:], short_opts, long_opts)
     except:
-        raise ParseArgsException(opts_mode + '-options')
+        raise ParseArgsException(opts_mode + "-options")

     # set options accordingly
     option_switch(opts)

     if len(args) < 1:
-        raise ParseArgsException('packages')
+        raise ParseArgsException("packages")

     return args


 def main():
     """Parse command line and execute all actions."""
-    CONFIG['nocolor'] = (
-        CONFIG['nocolor'] or
-        (settings["NOCOLOR"] in ('yes', 'true') or not isatty)
+    CONFIG["nocolor"] = CONFIG["nocolor"] or (
+        settings["NOCOLOR"] in ("yes", "true") or not isatty
     )
-    if CONFIG['nocolor']:
+    if CONFIG["nocolor"]:
         pp.output.nocolor()

     # parse command line options and actions
     try:
         queries = parse_args()
     except ParseArgsException as e:
-        if e.value == 'help':
-            print_usage(help='all')
+        if e.value == "help":
+            print_usage(help="all")
             exit_helper(0)

-        elif e.value[:5] == 'help-':
+        elif e.value[:5] == "help-":
             print_usage(help=e.value[5:])
             exit_helper(0)

-        elif e.value == 'version':
+        elif e.value == "version":
             print_version()
             exit_helper(0)

@@ -261,14 +326,14 @@ def main():
         print_usage(e.value)
         exit_helper(EINVAL)

-    if CONFIG['verbose'] > 2:
+    if CONFIG["verbose"] > 2:
         HTTPConnection.debuglevel = 1

-    if not CONFIG["format"] and not CONFIG['quiet']:
+    if not CONFIG["format"] and not CONFIG["quiet"]:
         CONFIG["progress"] = False

     on_progress = None
-    if CONFIG['progress']:
+    if CONFIG["progress"]:
         on_progress_gen = progress_bar()
         on_progress = next(on_progress_gen)
         on_progress(maxval=len(queries) * 100, increment=0, label="Working...")
@@ -293,35 +358,36 @@ def main():
             from os.path import basename  # To get the short name

             output.eerror(
-                "The short ebuild name '%s' is ambiguous. Please specify" %
-                basename(pkgs[0]) +
-                "one of the above fully-qualified ebuild names instead."
+                "The short ebuild name '%s' is ambiguous. Please specify"
+                % basename(pkgs[0])
+                + "one of the above fully-qualified ebuild names instead."
             )
             exit_helper(1)

         except GentoolkitException as err:
-            output.eerror('%s: %s' % (query, str(err)))
+            output.eerror("%s: %s" % (query, str(err)))
             exit_helper(1)

         except Exception as err:
             import traceback
-            print ('-' * 60)
-            traceback.print_exc(file=sys.stderr)
-            print ('-' * 60)
+
+            print("-" * 60)
+            traceback.print_exc(file=sys.stderr)
+            print("-" * 60)

-            output.eerror('%s: %s' % (query, str(err)))
+            output.eerror("%s: %s" % (query, str(err)))
             exit_helper(1)

-        if not ret and not CONFIG['quiet']:
+        if not ret and not CONFIG["quiet"]:
             output.einfo(
-                "Didn't find any new version, check package's homepage " +
-                "for more informations"
+                "Didn't find any new version, check package's homepage "
+                + "for more informations"
             )

-        if not (CONFIG['format'] or CONFIG['quiet']) and len(queries) > 1:
+        if not (CONFIG["format"] or CONFIG["quiet"]) and len(queries) > 1:
             print("")

-    if CONFIG['progress']:
+    if CONFIG["progress"]:
         next(on_progress_gen)
         print("\n", file=sys.stderr)

@@ -28,25 +28,25 @@ def guess_indent_values(before):
     def guess_for_tags(tags):
         for tag in tags:
             for i in [0, 2, 4, 6, 8, 12, 16]:
-                if '\n%s<%s' % (' ' * i, tag) in before:
+                if "\n%s<%s" % (" " * i, tag) in before:
                     return i, False
             for i in [0, 1, 2]:
-                if '\n%s<%s' % ('\t' * i, tag) in before:
+                if "\n%s<%s" % ("\t" * i, tag) in before:
                     return i, True
         return -1, False

     rindent, tab = guess_for_tags(
-        ['herd', 'maintainer', 'longdescription', 'use', 'upstream']
+        ["herd", "maintainer", "longdescription", "use", "upstream"]
     )
     if rindent == -1:
         rindent = 2
-    rindent_str = ('\t' if tab else ' ') * rindent
-    indent, tab = guess_for_tags(['watch', 'name', 'email'])
+    rindent_str = ("\t" if tab else " ") * rindent
+    indent, tab = guess_for_tags(["watch", "name", "email"])
     if indent == -1:
         indent = rindent * 2 if rindent else 4
-        if rindent and rindent_str == '\t':
+        if rindent and rindent_str == "\t":
             tab = True
-    indent_str = ('\t' if tab else ' ') * indent
+    indent_str = ("\t" if tab else " ") * indent
     return rindent_str, indent_str

@@ -66,7 +66,7 @@ def handle_diff(deb_url):

     watch_data = ""

-    fp = gzip.open(temp_deb, 'rb')
+    fp = gzip.open(temp_deb, "rb")
     for line in fp:
         if re.match(r"\+\+\+ .+?/debian/watch", line):
             fp.readline()  # diff lines, don't care
@@ -144,8 +144,9 @@ def patch_metadata(package, watch_data, diff=False):
     data = original

     # clean watch_data
-    watch_data = "\n".join([line for line in watch_data.split("\n")
-                            if not line.startswith("#")])  # comments
+    watch_data = "\n".join(
+        [line for line in watch_data.split("\n") if not line.startswith("#")]
+    )  # comments

     watch_data = watch_data.replace("\\\n", "")  # remove backslashes
@@ -163,10 +164,7 @@ def patch_metadata(package, watch_data, diff=False):
             continue

         # parse watch_line
-        result = re.match(
-            r'(?:opts=(?:"([^"]+?)"|([^\s]+?)) )?(.*)',
-            watch_line
-        )
+        result = re.match(r'(?:opts=(?:"([^"]+?)"|([^\s]+?)) )?(.*)', watch_line)

         opts_quote, opts, url = result.groups()
         opts = opts_quote or opts
@@ -188,21 +186,27 @@ def patch_metadata(package, watch_data, diff=False):
         url = " ".join([x for x in url_search.groups() if x is not None])

         if opts:
-            watch_tag = '%s<watch version="%s" %s>%s</watch>' % \
-                (indent, version, opts, url)
+            watch_tag = '%s<watch version="%s" %s>%s</watch>' % (
+                indent,
+                version,
+                opts,
+                url,
+            )
         else:
-            watch_tag = '%s<watch version="%s">%s</watch>' % \
-                (indent, version, url)
+            watch_tag = '%s<watch version="%s">%s</watch>' % (indent, version, url)
         watch_tags.append(watch_tag)

     watch_tags = "\n".join(watch_tags)

-    if '<upstream>' in data:
-        data = data.replace('<upstream>', '<upstream>\n%s' % watch_tags, 1)
+    if "<upstream>" in data:
+        data = data.replace("<upstream>", "<upstream>\n%s" % watch_tags, 1)
     else:
-        rep = '%s<upstream>\n%s\n%s</upstream>\n</pkgmetadata>' % \
-            (rindent, watch_tags, rindent)
-        data = data.replace('</pkgmetadata>', rep, 1)
+        rep = "%s<upstream>\n%s\n%s</upstream>\n</pkgmetadata>" % (
+            rindent,
+            watch_tags,
+            rindent,
+        )
+        data = data.replace("</pkgmetadata>", rep, 1)

     if not diff:
         return data
@@ -214,8 +218,8 @@ def patch_metadata(package, watch_data, diff=False):
     res = unified_diff(
         original.splitlines(True),
         data.splitlines(True),
-        fromfile=os.path.join('a/', metadata_path),
-        tofile=os.path.join('b/', metadata_path),
+        fromfile=os.path.join("a/", metadata_path),
+        tofile=os.path.join("b/", metadata_path),
     )
     return "".join([x for x in res])
@@ -223,12 +227,12 @@ def patch_metadata(package, watch_data, diff=False):
 def process_package(query, diff=False):
     try:
         matches = Query(query).smart_find(
             in_installed=True,
             in_porttree=True,
             in_overlay=True,
             include_masked=True,
             show_progress=False,
             no_matches_fatal=False,
         )
     except AmbiguousPackageName:
         logger.error(" Ambiguous package name")
@@ -240,7 +244,7 @@ def process_package(query, diff=False):

     matches = sorted(matches)
     package = matches.pop()
-    if '9999' in package.version and len(matches) > 0:
+    if "9999" in package.version and len(matches) > 0:
         package = matches.pop()

     watch_data = get_watch_data(package)
@@ -252,16 +256,21 @@ def process_package(query, diff=False):

 def main():
     import optparse

     p = optparse.OptionParser(
         usage="usage: %prog <package> [<package> [...]]",
     )
-    p.add_option('-d', '--diff', action="store_true", dest="diff",
-                 default=False,
-                 help="Outputs a diff")
+    p.add_option(
+        "-d",
+        "--diff",
+        action="store_true",
+        dest="diff",
+        default=False,
+        help="Outputs a diff",
+    )
     opts, packages = p.parse_args()

-    logging.basicConfig(stream=sys.stderr, level=logging.INFO,
-                        format='%(message)s')
+    logging.basicConfig(stream=sys.stderr, level=logging.INFO, format="%(message)s")

     for package in packages:
         logger.info("Processing %s..." % package)
@@ -269,5 +278,6 @@ def main():
         if result:
             sys.stdout.write(result)

+
 if __name__ == "__main__":
     main()
@@ -9,30 +9,30 @@ from ast import literal_eval


 CONFIG = {
-    'nocolor': False,
-    'quiet': False,
-    'verbose': 1,
-    'debug': False,
-    'brute-force': 3,
-    'brute-force-recursive': True,
-    'brute-force-false-watermark': 50,
-    'scan-dir': True,
-    'oneshot': True,
-    'user-agent': 'escan (http://euscan.iksaif.net)',
-    'skip-robots-txt': False,
-    'cache': False,
-    'format': None,
-    'indent': 2,
-    'progress': False,
-    'mirror': False,
-    'ignore-pre-release': False,
-    'ignore-pre-release-if-stable': False,
-    'ebuild-uri': False,
-    'handlers-exclude': [],
+    "nocolor": False,
+    "quiet": False,
+    "verbose": 1,
+    "debug": False,
+    "brute-force": 3,
+    "brute-force-recursive": True,
+    "brute-force-false-watermark": 50,
+    "scan-dir": True,
+    "oneshot": True,
+    "user-agent": "escan (http://euscan.iksaif.net)",
+    "skip-robots-txt": False,
+    "cache": False,
+    "format": None,
+    "indent": 2,
+    "progress": False,
+    "mirror": False,
+    "ignore-pre-release": False,
+    "ignore-pre-release-if-stable": False,
+    "ebuild-uri": False,
+    "handlers-exclude": [],
 }

 config = configparser.ConfigParser()
-config.read(['/etc/euscan.conf', os.path.expanduser('~/.euscan.conf')])
+config.read(["/etc/euscan.conf", os.path.expanduser("~/.euscan.conf")])
 if config.has_section("euscan"):
     for key, value in config.items("euscan"):
         if key in CONFIG:
@@ -41,49 +41,50 @@ if config.has_section("euscan"):
 BLACKLIST_VERSIONS = [
     # Compatibility package for running binaries linked against a
     # pre gcc 3.4 libstdc++, won't be updated
-    '>=sys-libs/libstdc++-v3-3.4',
+    ">=sys-libs/libstdc++-v3-3.4",
     # Actually older or incorrect
-    '~app-backup/backup-manager-0.7.15',
-    '=x11-plugins/wmacpimon-001',
+    "~app-backup/backup-manager-0.7.15",
+    "=x11-plugins/wmacpimon-001",
 ]

 BLACKLIST_PACKAGES = [
     # These kernels are almost dead
-    'sys-kernel/xbox-sources',
+    "sys-kernel/xbox-sources",
 ]

 SCANDIR_BLACKLIST_URLS = [
-    'mirror://rubygems/(.*)',  # Not browsable
-    'mirror://gentoo/(.*)'  # Directory too big
+    "mirror://rubygems/(.*)",  # Not browsable
+    "mirror://gentoo/(.*)",  # Directory too big
 ]

 BRUTEFORCE_BLACKLIST_PACKAGES = [
     # infinite loop any
     # http://plone.org/products/plonepopoll/releases/*/plonepopoll-2-6-1.tgz
     # link will work
-    'net-zope/plonepopoll'
+    "net-zope/plonepopoll"
 ]

 BRUTEFORCE_BLACKLIST_URLS = [
-    'http://hydra.nixos.org/build/(.*)',  # infinite loop
+    "http://hydra.nixos.org/build/(.*)",  # infinite loop
     # Doesn't respect 404, infinite loop
-    'http://www.rennings.net/gentoo/distfiles/(.*)',
-    'http://art.gnome.org/download/(.*)',
-    'http://barelysufficient.org/~olemarkus/(.*)',
-    'http://olemarkus.org/~olemarkus/(.*)',
+    "http://www.rennings.net/gentoo/distfiles/(.*)",
+    "http://art.gnome.org/download/(.*)",
+    "http://barelysufficient.org/~olemarkus/(.*)",
+    "http://olemarkus.org/~olemarkus/(.*)",
 ]

 ROBOTS_TXT_BLACKLIST_DOMAINS = [
-    '(.*)sourceforge(.*)',
-    '(.*)github.com',
-    '(.*)qt\.nokia\.com(.*)',
-    '(.*)chromium\.org(.*)',
-    '(.*)nodejs\.org(.*)',
-    '(.*)download\.mono-project\.com(.*)',
-    '(.*)fedorahosted\.org(.*)',
-    '(.*)download\.tuxfamily\.org(.*)',
-    '(.*)festvox\.org(.*)',
+    "(.*)sourceforge(.*)",
+    "(.*)github.com",
+    "(.*)qt\.nokia\.com(.*)",
+    "(.*)chromium\.org(.*)",
+    "(.*)nodejs\.org(.*)",
+    "(.*)download\.mono-project\.com(.*)",
+    "(.*)fedorahosted\.org(.*)",
+    "(.*)download\.tuxfamily\.org(.*)",
+    "(.*)festvox\.org(.*)",
 ]

 from euscan.out import EuscanOutput

 output = EuscanOutput(CONFIG)
@@ -24,12 +24,10 @@ def package_from_ebuild(ebuild):
     # since the canonical path returned from os.getcwd() may may be
     # unusable in cases where the directory stucture is built from
     # symlinks.
-    pwd = os.environ.get('PWD', '')
+    pwd = os.environ.get("PWD", "")
     if sys.hexversion < 0x3000000:
-        pwd = _unicode_decode(pwd, encoding=_encodings['content'],
-                              errors='strict')
-    if pwd and pwd != mycwd and \
-            os.path.realpath(pwd) == mycwd:
+        pwd = _unicode_decode(pwd, encoding=_encodings["content"], errors="strict")
+    if pwd and pwd != mycwd and os.path.realpath(pwd) == mycwd:
         mycwd = portage.normalize_path(pwd)
         ebuild = os.path.join(mycwd, ebuild)
@@ -38,22 +36,29 @@ def package_from_ebuild(ebuild):
     # subdirectories of the base can be built from symlinks (like crossdev
     # does).
     ebuild_portdir = os.path.realpath(
-        os.path.dirname(os.path.dirname(os.path.dirname(ebuild))))
+        os.path.dirname(os.path.dirname(os.path.dirname(ebuild)))
+    )
     ebuild = os.path.join(ebuild_portdir, *ebuild.split(os.path.sep)[-3:])
-    vdb_path = os.path.join(portage.settings['ROOT'], VDB_PATH)
+    vdb_path = os.path.join(portage.settings["ROOT"], VDB_PATH)

     # Make sure that portdb.findname() returns the correct ebuild.
-    if ebuild_portdir != vdb_path and \
-            ebuild_portdir not in portage.portdb.porttrees:
+    if ebuild_portdir != vdb_path and ebuild_portdir not in portage.portdb.porttrees:
         if sys.hexversion >= 0x3000000:
-            os.environ["PORTDIR_OVERLAY"] = \
-                os.environ.get("PORTDIR_OVERLAY", "") + \
-                " " + _shell_quote(ebuild_portdir)
+            os.environ["PORTDIR_OVERLAY"] = (
+                os.environ.get("PORTDIR_OVERLAY", "")
+                + " "
+                + _shell_quote(ebuild_portdir)
+            )
         else:
-            os.environ["PORTDIR_OVERLAY"] = \
-                os.environ.get("PORTDIR_OVERLAY", "") + \
-                " " + _unicode_encode(_shell_quote(ebuild_portdir),
-                                      encoding=_encodings['content'], errors='strict')
+            os.environ["PORTDIR_OVERLAY"] = (
+                os.environ.get("PORTDIR_OVERLAY", "")
+                + " "
+                + _unicode_encode(
+                    _shell_quote(ebuild_portdir),
+                    encoding=_encodings["content"],
+                    errors="strict",
+                )
+            )

     portage.close_portdbapi_caches()
     importlib.reload(portage)
@@ -6,31 +6,27 @@ from euscan import CONFIG, output

 from portage.xml.metadata import MetaDataXML

-handlers = {'package': [], 'url': [], 'all': {}}
+handlers = {"package": [], "url": [], "all": {}}

 # autoimport all modules in this directory and append them to handlers list
 for loader, module_name, is_pkg in pkgutil.walk_packages(__path__):

     module = loader.find_spec(module_name).loader.load_module(module_name)
-    if not hasattr(module, 'HANDLER_NAME'):
+    if not hasattr(module, "HANDLER_NAME"):
         continue
-    if hasattr(module, 'scan_url'):
-        handlers['url'].append(module)
-    if hasattr(module, 'scan_pkg'):
-        handlers['package'].append(module)
-    handlers['all'][module.HANDLER_NAME] = module
+    if hasattr(module, "scan_url"):
+        handlers["url"].append(module)
+    if hasattr(module, "scan_pkg"):
+        handlers["package"].append(module)
+    handlers["all"][module.HANDLER_NAME] = module


 # sort handlers by priority
 def sort_handlers(handlers):
-    return sorted(
-        handlers,
-        key=lambda handler: handler.PRIORITY,
-        reverse=True
-    )
+    return sorted(handlers, key=lambda handler: handler.PRIORITY, reverse=True)


-handlers['package'] = sort_handlers(handlers['package'])
-handlers['url'] = sort_handlers(handlers['url'])
+handlers["package"] = sort_handlers(handlers["package"])
+handlers["url"] = sort_handlers(handlers["url"])


 def find_best_handler(kind, pkg, *args):
@@ -38,8 +34,9 @@ def find_best_handler(kind, pkg, *args):
     Find the best handler for the given package
     """
     for handler in handlers[kind]:
-        if (handler.HANDLER_NAME not in CONFIG["handlers-exclude"] and
-                handler.can_handle(pkg, *args)):
+        if handler.HANDLER_NAME not in CONFIG[
+            "handlers-exclude"
+        ] and handler.can_handle(pkg, *args):
             return handler
     return None
@@ -49,8 +46,8 @@ def find_handlers(kind, names):

     for name in names:
         # Does this handler exist, and handle this kind of thing ? (pkg / url)
-        if name in handlers['all'] and handlers['all'][name] in handlers[kind]:
-            ret.append(handlers['all'][name])
+        if name in handlers["all"] and handlers["all"][name] in handlers[kind]:
+            ret.append(handlers["all"][name])

     return ret
@@ -60,17 +57,16 @@ def get_metadata(pkg):

     pkg_metadata = None

-    meta_override = os.path.join('metadata', pkg.category, pkg.name,
-                                 'metadata.xml')
+    meta_override = os.path.join("metadata", pkg.category, pkg.name, "metadata.xml")

     try:
         if os.path.exists(meta_override):
             pkg_metadata = MetaDataXML(meta_override)
-            output.einfo('Using custom metadata: %s' % meta_override)
+            output.einfo("Using custom metadata: %s" % meta_override)
         if not pkg_metadata:
             pkg_metadata = pkg.metadata
     except Exception as e:
-        output.ewarn('Error when fetching metadata: %s' % str(e))
+        output.ewarn("Error when fetching metadata: %s" % str(e))

     if not pkg_metadata:
         return {}
@@ -79,13 +75,13 @@ def get_metadata(pkg):
     for upstream in pkg_metadata._xml_tree.findall("upstream"):
         for node in upstream.findall("watch"):
             options = dict(node.attrib)
-            options['data'] = node.text
+            options["data"] = node.text

             if "type" in options:
-                handler = options['type']
+                handler = options["type"]
             else:
                 handler = "url"
-                options['type'] = "url"
+                options["type"] = "url"

             for key in ["versionmangle", "downloadurlmangle"]:
                 value = options.get(key, None)
@@ -103,10 +99,10 @@ def get_metadata(pkg):
                     continue
             if handler in metadata:
                 for i in range(len(metadata[handler])):
-                    if not metadata[handler][i]['data']:
-                        metadata[handler][i]['data'] = node.text
+                    if not metadata[handler][i]["data"]:
+                        metadata[handler][i]["data"] = node.text
             else:
-                metadata[handler] = [{'type': handler, 'data': node.text}]
+                metadata[handler] = [{"type": handler, "data": node.text}]

     return metadata

@@ -145,24 +141,21 @@ def scan_url(pkg, urls, options, on_progress=None):

         output.einfo("SRC_URI is '%s'" % url)

-        if '://' not in url:
+        if "://" not in url:
             output.einfo("Invalid url '%s'" % url)
             continue

         try:
-            url_handler = find_best_handler('url', pkg, url)
+            url_handler = find_best_handler("url", pkg, url)
             if url_handler:
                 for o in options:
                     versions += url_handler.scan_url(pkg, url, o)
             else:
                 output.eerror("Can't find a suitable handler!")
         except Exception as e:
-            output.ewarn(
-                "Handler failed: [%s] %s" %
-                (e.__class__.__name__, str(e))
-            )
+            output.ewarn("Handler failed: [%s] %s" % (e.__class__.__name__, str(e)))

-        if versions and CONFIG['oneshot']:
+        if versions and CONFIG["oneshot"]:
             break

     if on_progress and progress_available > 0:
@@ -178,15 +171,15 @@ def scan(pkg, urls, on_progress=None):
     in url handling.
     """

-    if not CONFIG['quiet'] and not CONFIG['format']:
-        sys.stdout.write('\n')
+    if not CONFIG["quiet"] and not CONFIG["format"]:
+        sys.stdout.write("\n")

     metadata = get_metadata(pkg)
     versions = []

-    pkg_handlers = find_handlers('package', list(metadata.keys()))
+    pkg_handlers = find_handlers("package", list(metadata.keys()))
     if not pkg_handlers:
-        pkg_handler = find_best_handler('package', pkg)
+        pkg_handler = find_best_handler("package", pkg)
         if pkg_handler:
             pkg_handlers = [pkg_handler]
@@ -201,17 +194,17 @@ def scan(pkg, urls, on_progress=None):


 def mangle(kind, name, string):
-    if name not in handlers['all']:
+    if name not in handlers["all"]:
         return None
-    handler = handlers['all'][name]
-    if not hasattr(handler, 'mangle_%s' % kind):
+    handler = handlers["all"][name]
+    if not hasattr(handler, "mangle_%s" % kind):
         return None
-    return getattr(handler, 'mangle_%s' % kind)(string)
+    return getattr(handler, "mangle_%s" % kind)(string)


 def mangle_url(name, string):
-    return mangle('url', name, string)
+    return mangle("url", name, string)


 def mangle_version(name, string):
-    return mangle('version', name, string)
+    return mangle("version", name, string)
@@ -36,19 +36,13 @@ def scan_url(pkg, url, options):
     project_page = "http://developer.berlios.de/projects/%s" % project
     content = urllib.request.urlopen(project_page).read()

-    project_id = re.search(
-        r"/project/filelist.php\?group_id=(\d+)",
-        content
-    ).group(1)
+    project_id = re.search(r"/project/filelist.php\?group_id=(\d+)", content).group(1)

     base_url = (
-        "http://developer.berlios.de/project/filelist.php?group_id=%s" %
-        project_id
+        "http://developer.berlios.de/project/filelist.php?group_id=%s" % project_id
     )

-    file_pattern = regex_from_template(
-        filename.replace(ver, "${PV}")
-    )
+    file_pattern = regex_from_template(filename.replace(ver, "${PV}"))

     result = url_scan(pkg, base_url, file_pattern)

@@ -13,7 +13,7 @@ _cpan_package_name_re = re.compile("mirror://cpan/authors/.*/([^/.]*).*")


 def can_handle(pkg, url=None):
-    return url and url.startswith('mirror://cpan/')
+    return url and url.startswith("mirror://cpan/")


 def guess_package(cp, url):
@@ -24,7 +24,7 @@ def guess_package(cp, url):
     if match:
         pkg = match.group(1)
         try:
-            cp, ver, rev = portage.pkgsplit('fake/' + pkg)
+            cp, ver, rev = portage.pkgsplit("fake/" + pkg)
         except:
             pass

@@ -34,7 +34,7 @@ def guess_package(cp, url):


 def mangle_version(up_pv):
-    if up_pv.startswith('v'):
+    if up_pv.startswith("v"):
         return up_pv[1:]

     # clean
@@ -53,14 +53,14 @@ def mangle_version(up_pv):
     if len(splitted) == 2:  # Split second part is sub-groups
         part = splitted.pop()
         for i in range(0, len(part), 3):
-            splitted.append(part[i:i + 3])
+            splitted.append(part[i : i + 3])

     if len(splitted) == 2:  # add last group if it's missing
         splitted.append("0")

     groups = [splitted[0]]
     for part in splitted[1:-1]:
         groups.append(part.ljust(3, "0"))
     if splitted[-1] == "0":
         groups.append(splitted[-1])
     else:
@@ -78,11 +78,11 @@ def mangle_version(up_pv):


 def cpan_mangle_version(pv):
-    pos = pv.find('.')
+    pos = pv.find(".")
     if pos <= 0:
         return pv
-    up_pv = pv.replace('.', '')
-    up_pv = up_pv[0:pos] + '.' + up_pv[pos:]
+    up_pv = pv.replace(".", "")
+    up_pv = up_pv[0:pos] + "." + up_pv[pos:]
     return up_pv

@@ -99,17 +99,17 @@ def scan_url(pkg, url, options):

     output.einfo("Using CPAN API: %s", remote_pkg)

-    return scan_pkg(pkg, {'data': remote_pkg})
+    return scan_pkg(pkg, {"data": remote_pkg})


 def scan_pkg(pkg, options):
-    remote_pkg = options['data']
+    remote_pkg = options["data"]

     # Defaults to CPAN mangling rules
-    if 'versionmangle' not in options:
-        options['versionmangle'] = ['cpan', 'gentoo']
+    if "versionmangle" not in options:
+        options["versionmangle"] = ["cpan", "gentoo"]

-    url = 'http://search.cpan.org/api/dist/%s' % remote_pkg
+    url = "http://search.cpan.org/api/dist/%s" % remote_pkg
     cp, ver, rev = pkg.cp, pkg.version, pkg.revision
     m_ver = cpan_mangle_version(ver)

@@ -128,19 +128,19 @@ def scan_pkg(pkg, options):
         data = fp.read()
         data = json.loads(data)

-    if 'releases' not in data:
+    if "releases" not in data:
         return []

     ret = []

-    for version in data['releases']:
-        #if version['status'] == 'testing':
+    for version in data["releases"]:
+        # if version['status'] == 'testing':
         #    continue

-        up_pv = version['version']
+        up_pv = version["version"]
         pv = mangling.mangle_version(up_pv, options)

-        if up_pv.startswith('v'):
+        if up_pv.startswith("v"):
             if helpers.version_filtered(cp, ver, pv):
                 continue
         else:
@@ -148,11 +148,11 @@ def scan_pkg(pkg, options):
             if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
                 continue

-        url = 'mirror://cpan/authors/id/%s/%s/%s/%s' % (
-            version['cpanid'][0],
-            version['cpanid'][0:1],
-            version['cpanid'],
-            version['archive']
+        url = "mirror://cpan/authors/id/%s/%s/%s/%s" % (
+            version["cpanid"][0],
+            version["cpanid"][0:1],
+            version["cpanid"],
+            version["archive"],
         )

         url = mangling.mangle_url(url, options)
@@ -19,7 +19,7 @@ def can_handle(pkg, url=None):
 def scan_pkg(pkg, options):
     cp, ver, rev = portage.pkgsplit(pkg.cpv)

-    packages_url, package_name = options['data'].strip().split(" ", 1)
+    packages_url, package_name = options["data"].strip().split(" ", 1)

     output.einfo("Using Debian Packages: " + packages_url)

@@ -17,7 +17,7 @@ def can_handle(pkg, url=None):
 def scan_pkg(pkg, options):
     cp, ver, rev = portage.pkgsplit(pkg.cpv)

-    package = options['data'].strip()
+    package = options["data"].strip()

     output.einfo("Using FreeCode handler: " + package)

@@ -25,8 +25,7 @@ def scan_pkg(pkg, options):
     content = str(fp.read())

     result = re.findall(
-        r'<a href="/projects/%s/releases/(\d+)">([^<]+)</a>' % package,
-        content
+        r'<a href="/projects/%s/releases/(\d+)">([^<]+)</a>' % package, content
     )

     ret = []
@@ -34,15 +33,15 @@ def scan_pkg(pkg, options):
         pv = mangling.mangle_version(up_pv, options)
         if helpers.version_filtered(cp, ver, pv):
             continue
-        fp = urllib.request.urlopen("http://freecode.com/projects/%s/releases/%s" %
-                                    (package, release_id))
+        fp = urllib.request.urlopen(
+            "http://freecode.com/projects/%s/releases/%s" % (package, release_id)
+        )
         content = str(fp.read())
         download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]
         fp = urllib.request.urlopen("http://freecode.com%s" % download_page)
         content = str(fp.read())
         url = re.findall(
-            r'In case it doesn\'t, click here: <a href="([^"]+)"',
-            content
+            r'In case it doesn\'t, click here: <a href="([^"]+)"', content
         )[0]
         ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
     return ret
@@ -11,8 +11,15 @@ except ImportError:

 import portage

-from euscan import output, helpers, mangling, CONFIG, SCANDIR_BLACKLIST_URLS, \
-    BRUTEFORCE_BLACKLIST_PACKAGES, BRUTEFORCE_BLACKLIST_URLS
+from euscan import (
+    output,
+    helpers,
+    mangling,
+    CONFIG,
+    SCANDIR_BLACKLIST_URLS,
+    BRUTEFORCE_BLACKLIST_PACKAGES,
+    BRUTEFORCE_BLACKLIST_URLS,
+)

 HANDLER_NAME = "generic"
 CONFIDENCE = 45
@@ -55,7 +62,7 @@ def scan_html(data, url, pattern):
     soup = BeautifulSoup(data, features="lxml")
     results = []

-    for link in soup.findAll('a'):
+    for link in soup.findAll("a"):
         href = link.get("href")
         if not href:
             continue
@@ -66,11 +73,9 @@ def scan_html(data, url, pattern):
         match = re.search(pattern, href, re.I)
         if match:
             results.append(
-                (".".join([x for x in match.groups() if x is not None]),
-                 match.group(0))
+                (".".join([x for x in match.groups() if x is not None]), match.group(0))
             )

     return results

@@ -83,8 +88,7 @@ def scan_ftp(data, url, pattern):
         match = re.search(pattern, line, re.I)
         if match:
             results.append(
-                (".".join([x for x in match.groups() if x is not None]),
-                 match.group(0))
+                (".".join([x for x in match.groups() if x is not None]), match.group(0))
             )
     return results

@@ -116,7 +120,7 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):

     if re.search(b"<\s*a\s+[^>]*href", data, re.I):
         results.extend(scan_html(data, url, pattern))
-    elif url.startswith('ftp://'):
+    elif url.startswith("ftp://"):
         results.extend(scan_ftp(data, url, pattern))

     versions = []
@@ -136,8 +140,7 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
             versions.append((path, pv, HANDLER_NAME, confidence))

         if steps:
-            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url,
-                                           options)
+            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url, options)
             versions.extend(ret)

     return versions
@@ -160,16 +163,14 @@ def scan_url(pkg, url, options):
     if ver not in resolved_url:
         newver = helpers.version_change_end_sep(ver)
         if newver and newver in resolved_url:
-            output.einfo(
-                "Version: using %s instead of %s" % (newver, ver)
-            )
+            output.einfo("Version: using %s instead of %s" % (newver, ver))
             ver = newver

     template = helpers.template_from_url(resolved_url, ver)
-    if '${' not in template:
+    if "${" not in template:
         output.einfo(
-            "Url doesn't seems to depend on version: %s not found in %s" %
-            (ver, resolved_url)
+            "Url doesn't seems to depend on version: %s not found in %s"
+            % (ver, resolved_url)
         )
         return []
     else:
@@ -220,10 +221,11 @@ def brute_force(pkg, url):

     template = helpers.template_from_url(url, ver)

-    if '${PV}' not in template:
+    if "${PV}" not in template:
         output.einfo(
-            "Url doesn't seems to depend on full version: %s not found in %s" %
-            (ver, url))
+            "Url doesn't seems to depend on full version: %s not found in %s"
+            % (ver, url)
+        )
         return []
     else:
         output.einfo("Brute forcing: %s" % template)
@@ -250,19 +252,15 @@ def brute_force(pkg, url):

         if not infos:
             continue
-        confidence = confidence_score(try_url, url,
-                                      minimum=BRUTEFORCE_CONFIDENCE)
+        confidence = confidence_score(try_url, url, minimum=BRUTEFORCE_CONFIDENCE)
         result.append([try_url, version, BRUTEFORCE_HANDLER_NAME, confidence])

-        if len(result) > CONFIG['brute-force-false-watermark']:
-            output.einfo(
-                "Broken server detected ! Skipping brute force."
-            )
+        if len(result) > CONFIG["brute-force-false-watermark"]:
+            output.einfo("Broken server detected ! Skipping brute force.")
             return []

     if CONFIG["brute-force-recursive"]:
-        for v in helpers.gen_versions(list(components),
-                                      CONFIG["brute-force"]):
+        for v in helpers.gen_versions(list(components), CONFIG["brute-force"]):
             if v not in versions and tuple(v) not in done:
                 versions.append(v)

@@ -12,18 +12,18 @@ PRIORITY = 90


 def can_handle(pkg, url=None):
-    return url and url.startswith('mirror://github/')
+    return url and url.startswith("mirror://github/")


 def guess_package(cp, url):
-    match = re.search('^mirror://github/(.*?)/(.*?)/(.*)$', url)
+    match = re.search("^mirror://github/(.*?)/(.*?)/(.*)$", url)

-    assert(match)
+    assert match
     return (match.group(1), match.group(2), match.group(3))


 def scan_url(pkg, url, options):
-    'http://developer.github.com/v3/repos/downloads/'
+    "http://developer.github.com/v3/repos/downloads/"

     user, project, filename = guess_package(pkg.cpv, url)

@ -35,25 +35,27 @@ def scan_url(pkg, url, options):
|
|||||||
# now create a filename-matching regexp
|
# now create a filename-matching regexp
|
||||||
# XXX: supposedly replace first with (?P<foo>...)
|
# XXX: supposedly replace first with (?P<foo>...)
|
||||||
# and remaining ones with (?P=foo)
|
# and remaining ones with (?P=foo)
|
||||||
fnre = re.compile('^%s$' % \
|
fnre = re.compile("^%s$" % re.escape(filename).replace(re.escape(ver), "(.*?)"))
|
||||||
re.escape(filename).replace(re.escape(ver), '(.*?)'))
|
|
||||||
|
|
||||||
output.einfo("Using github API for: project=%s user=%s filename=%s" % \
|
output.einfo(
|
||||||
(project, user, filename))
|
"Using github API for: project=%s user=%s filename=%s"
|
||||||
|
% (project, user, filename)
|
||||||
|
)
|
||||||
|
|
||||||
dlreq = urllib.request.urlopen('https://api.github.com/repos/%s/%s/downloads' % \
|
dlreq = urllib.request.urlopen(
|
||||||
(user, project))
|
"https://api.github.com/repos/%s/%s/downloads" % (user, project)
|
||||||
|
)
|
||||||
dls = json.load(dlreq)
|
dls = json.load(dlreq)
|
||||||
|
|
||||||
ret = []
|
ret = []
|
||||||
for dl in dls:
|
for dl in dls:
|
||||||
m = fnre.match(dl['name'])
|
m = fnre.match(dl["name"])
|
||||||
|
|
||||||
if m:
|
if m:
|
||||||
pv = mangling.mangle_version(m.group(1), options)
|
pv = mangling.mangle_version(m.group(1), options)
|
||||||
if helpers.version_filtered(cp, ver, pv):
|
if helpers.version_filtered(cp, ver, pv):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
url = mangling.mangle_url(dl['html_url'], options)
|
url = mangling.mangle_url(dl["html_url"], options)
|
||||||
ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
|
ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
|
||||||
return ret
|
return ret
|
||||||
|
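Note: the `fnre` one-liner above packs a small trick worth spelling out; a standalone sketch with illustrative values:

    import re

    # Escape the known filename, then re-open a capture group where the
    # current version sat, so sibling downloads yield their versions.
    filename, ver = "foo-1.2.tar.gz", "1.2"
    fnre = re.compile("^%s$" % re.escape(filename).replace(re.escape(ver), "(.*?)"))
    print(fnre.match("foo-1.3.tar.gz").group(1))  # -> 1.3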
@@ -16,15 +16,15 @@ HANDLER_NAME = "gnome"
 CONFIDENCE = 100
 PRIORITY = 90

-GNOME_URL_SOURCE = 'http://ftp.gnome.org/pub/GNOME/sources'
+GNOME_URL_SOURCE = "http://ftp.gnome.org/pub/GNOME/sources"


 def can_handle(_pkg, url=None):
-    return url and url.startswith('mirror://gnome/')
+    return url and url.startswith("mirror://gnome/")


 def guess_package(cp, url):
-    match = re.search('mirror://gnome/sources/([^/]+)/.*', url)
+    match = re.search("mirror://gnome/sources/([^/]+)/.*", url)
     if match:
         return match.group(1)

@@ -34,27 +34,27 @@ def guess_package(cp, url):


 def scan_url(pkg, url, options):
-    'http://ftp.gnome.org/pub/GNOME/sources/'
+    "http://ftp.gnome.org/pub/GNOME/sources/"
     package = {
-        'data': guess_package(pkg.cpv, url),
-        'type': 'gnome',
+        "data": guess_package(pkg.cpv, url),
+        "type": "gnome",
     }
     return scan_pkg(pkg, package)


 def scan_pkg(pkg, options):
-    package = options['data']
+    package = options["data"]

     output.einfo("Using Gnome json cache: " + package)

-    fp = urllib.request.urlopen('/'.join([GNOME_URL_SOURCE, package, 'cache.json']))
+    fp = urllib.request.urlopen("/".join([GNOME_URL_SOURCE, package, "cache.json"]))
     content = fp.read()
     fp.close()

-    cache = json.loads(content, encoding='ascii')
+    cache = json.loads(content, encoding="ascii")

     if cache[0] != 4:
-        output.eerror('Unknow cache format detected')
+        output.eerror("Unknow cache format detected")
         return []

     versions = cache[2][package]
@@ -72,13 +72,12 @@ def scan_pkg(pkg, options):
         if helpers.version_filtered(cp, ver, pv):
             continue
         up_files = cache[1][package][up_pv]
-        for tarball_comp in ('tar.xz', 'tar.bz2', 'tar.gz'):
+        for tarball_comp in ("tar.xz", "tar.bz2", "tar.gz"):
             if tarball_comp in up_files:
-                url = '/'.join([GNOME_URL_SOURCE, package,
-                                up_files[tarball_comp]])
+                url = "/".join([GNOME_URL_SOURCE, package, up_files[tarball_comp]])
                 break
         else:
-            output.ewarn('No tarball for release %s' % up_pv)
+            output.ewarn("No tarball for release %s" % up_pv)
         ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

     return ret
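Note: for readers of this hunk, the cache.json consumed above has roughly this shape, inferred from how it is indexed here; the field meanings are assumptions, and the package/paths below are illustrative only.

    # cache[0] is the format version checked above; cache[1] maps
    # package -> version -> tarball files; cache[2] lists versions.
    cache = [
        4,
        {"gedit": {"3.8.1": {"tar.xz": "gedit/3.8/gedit-3.8.1.tar.xz"}}},
        {"gedit": ["3.8.1"]},
    ]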
@@ -23,6 +23,7 @@ def can_handle(pkg, url=None):

     return re.match(package_name_regex, url)

+
 def scan_url(pkg, url, options):
     output.einfo("Using Google Code handler")

@@ -31,9 +32,7 @@ def scan_url(pkg, url, options):
     package_name = re.match(package_name_regex, url).group(1)
     base_url = "http://code.google.com/p/%s/downloads/list" % package_name

-    file_pattern = regex_from_template(
-        url.split("/")[-1].replace(ver, "${PV}")
-    )
+    file_pattern = regex_from_template(url.split("/")[-1].replace(ver, "${PV}"))

     result = url_scan(pkg, base_url, file_pattern)
@@ -6,14 +6,14 @@ HANDLER_NAME = "kde"


 def can_handle(pkg, url):
-    return url and url.startswith('mirror://kde/')
+    return url and url.startswith("mirror://kde/")


 def clean_results(results):
     ret = []

     for path, version, _, confidence in results:
-        if version == '5SUMS':
+        if version == "5SUMS":
             continue
         ret.append((path, version, HANDLER_NAME, confidence))

@@ -23,16 +23,15 @@ def clean_results(results):
 def scan_url(pkg, url, options):
     results = generic.scan(pkg.cpv, url)

-    if generic.startswith('mirror://kde/unstable/'):
-        url = generic.replace('mirror://kde/unstable/', 'mirror://kde/stable/')
+    if generic.startswith("mirror://kde/unstable/"):
+        url = generic.replace("mirror://kde/unstable/", "mirror://kde/stable/")
         results += generic.scan(pkg.cpv, url)

     if not results:  # if nothing was found go brute forcing
         results = generic.brute_force(pkg.cpv, url)

-        if generic.startswith('mirror://kde/unstable/'):
-            url = generic.replace('mirror://kde/unstable/',
-                                  'mirror://kde/stable/')
+        if generic.startswith("mirror://kde/unstable/"):
+            url = generic.replace("mirror://kde/unstable/", "mirror://kde/stable/")
             results += generic.brute_force(pkg.cpv, url)

     return clean_results(results)
@@ -6,7 +6,8 @@ PRIORITY = 90


 def can_handle(pkg, url=None):
-    return url and url.startswith('http://%s.php.net/get/' % HANDLER_NAME)
+    return url and url.startswith("http://%s.php.net/get/" % HANDLER_NAME)

+
 scan_url = php.scan_url
 scan_pkg = php.scan_pkg
@@ -4,8 +4,10 @@ HANDLER_NAME = "pecl"
 CONFIDENCE = 100
 PRIORITY = 90

+
 def can_handle(pkg, url=None):
-    return url and url.startswith('http://%s.php.net/get/' % HANDLER_NAME)
+    return url and url.startswith("http://%s.php.net/get/" % HANDLER_NAME)

+
 scan_url = php.scan_url
 scan_pkg = php.scan_pkg
@@ -9,11 +9,13 @@ HANDLER_NAME = "php"
 CONFIDENCE = 100
 PRIORITY = 90

+
 def can_handle(pkg, url=None):
     return False

+
 def guess_package_and_channel(cp, url):
-    match = re.search('http://(.*)\.php\.net/get/(.*)-(.*).tgz', url)
+    match = re.search("http://(.*)\.php\.net/get/(.*)-(.*).tgz", url)

     if match:
         host = match.group(1)
@@ -26,15 +28,16 @@ def guess_package_and_channel(cp, url):

 def scan_url(pkg, url, options):
     package, channel = guess_package_and_channel(pkg.cp, url)
-    return scan_pkg(pkg, {'type' : channel, 'data' : package })
+    return scan_pkg(pkg, {"type": channel, "data": package})

+
 def scan_pkg(pkg, options):
     cp, ver, rev = pkg.cp, pkg.version, pkg.revision

-    package = options['data']
-    channel = options['type']
+    package = options["data"]
+    channel = options["type"]

-    url = 'http://%s.php.net/rest/r/%s/allreleases.xml' % (channel, package.lower())
+    url = "http://%s.php.net/rest/r/%s/allreleases.xml" % (channel, package.lower())

     output.einfo("Using: " + url)

@@ -61,7 +64,7 @@ def scan_pkg(pkg, options):
         if helpers.version_filtered(cp, ver, pv):
             continue

-        url = 'http://%s.php.net/get/%s-%s.tgz' % (channel, package, up_pv)
+        url = "http://%s.php.net/get/%s-%s.tgz" % (channel, package, up_pv)
         url = mangling.mangle_url(url, options)

         ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
@@ -11,11 +11,11 @@ PRIORITY = 90


 def can_handle(pkg, url=None):
-    return url and url.startswith('mirror://pypi/')
+    return url and url.startswith("mirror://pypi/")


 def guess_package(cp, url):
-    match = re.search('mirror://pypi/\w+/(.*)/.*', url)
+    match = re.search("mirror://pypi/\w+/(.*)/.*", url)
     if match:
         return match.group(1)

@@ -25,18 +25,18 @@ def guess_package(cp, url):


 def scan_url(pkg, url, options):
-    'http://wiki.python.org/moin/PyPiXmlRpc'
+    "http://wiki.python.org/moin/PyPiXmlRpc"

     package = guess_package(pkg.cpv, url)
-    return scan_pkg(pkg, {'data': package})
+    return scan_pkg(pkg, {"data": package})


 def scan_pkg(pkg, options):
-    package = options['data']
+    package = options["data"]

     output.einfo("Using PyPi XMLRPC: " + package)

-    client = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
+    client = xmlrpc.client.ServerProxy("https://pypi.python.org/pypi")
     versions = client.package_releases(package)

     if not versions:
@@ -52,7 +52,6 @@ def scan_pkg(pkg, options):
         if helpers.version_filtered(cp, ver, pv):
             continue
         urls = client.release_urls(package, up_pv)
-        urls = " ".join([mangling.mangle_url(infos['url'], options)
-                         for infos in urls])
+        urls = " ".join([mangling.mangle_url(infos["url"], options) for infos in urls])
         ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
     return ret
@@ -11,13 +11,13 @@ PRIORITY = 90


 def can_handle(pkg, url=None):
-    return url and url.startswith('mirror://rubygems/')
+    return url and url.startswith("mirror://rubygems/")


 def guess_gem(cpv, url):
-    match = re.search('mirror://rubygems/(.*).gem', url)
+    match = re.search("mirror://rubygems/(.*).gem", url)
     if match:
-        cpv = 'fake/%s' % match.group(1)
+        cpv = "fake/%s" % match.group(1)

     ret = portage.pkgsplit(cpv)
     if not ret:
@@ -30,23 +30,22 @@ def guess_gem(cpv, url):


 def scan_url(pkg, url, options):
-    'http://guides.rubygems.org/rubygems-org-api/#gemversion'
+    "http://guides.rubygems.org/rubygems-org-api/#gemversion"

     gem = guess_gem(pkg.cpv, url)

     if not gem:
-        output.eerror("Can't guess gem name using %s and %s" % \
-            (pkg.cpv, url))
+        output.eerror("Can't guess gem name using %s and %s" % (pkg.cpv, url))
         return []

     output.einfo("Using RubyGem API: %s" % gem)

-    return scan_pkg(pkg, {'data': gem})
+    return scan_pkg(pkg, {"data": gem})


 def scan_pkg(pkg, options):
-    gem = options['data']
-    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem
+    gem = options["data"]
+    url = "http://rubygems.org/api/v1/versions/%s.json" % gem

     try:
         fp = helpers.urlopen(url)
@@ -65,11 +64,11 @@ def scan_pkg(pkg, options):

     ret = []
     for version in versions:
-        up_pv = version['number']
+        up_pv = version["number"]
         pv = mangling.mangle_version(up_pv, options)
         if helpers.version_filtered(cp, ver, pv):
             continue
-        url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, up_pv)
+        url = "http://rubygems.org/gems/%s-%s.gem" % (gem, up_pv)
         url = mangling.mangle_url(url, options)
         ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
     return ret
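Note: the endpoint used above returns a JSON list of releases whose "number" field is the upstream version; a self-contained sketch of the same lookup (gem name illustrative):

    import json
    import urllib.request

    url = "https://rubygems.org/api/v1/versions/rake.json"
    with urllib.request.urlopen(url) as fp:
        for release in json.load(fp):
            print(release["number"])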
@@ -28,14 +28,11 @@ def scan_url(pkg, url, options):
     cp, ver, rev = portage.pkgsplit(pkg.cpv)

     project, filename = re.search(
-        "mirror://sourceforge/([^/]+)/(?:.*/)?([^/]+)",
-        url
+        "mirror://sourceforge/([^/]+)/(?:.*/)?([^/]+)", url
     ).groups()

     base_url = "http://qa.debian.org/watch/sf.php/%s" % project
-    file_pattern = regex_from_template(
-        filename.replace(ver, "${PV}")
-    )
+    file_pattern = regex_from_template(filename.replace(ver, "${PV}"))

     result = url_scan(pkg, base_url, file_pattern)
@@ -33,7 +33,7 @@ def handle_directory_patterns(base, file_pattern):
         i += 1
     basedir = "/".join(basedir)
     directory_pattern = splitted[i]
-    final = "/".join(splitted[i + 1:])
+    final = "/".join(splitted[i + 1 :])

     try:
         fp = helpers.urlopen(basedir)
@@ -52,15 +52,14 @@ def handle_directory_patterns(base, file_pattern):
     else:
         scan_data = generic.scan_html(data, basedir, directory_pattern)

-    return [("/".join((basedir, path, final)), file_pattern)
-            for _, path in scan_data]
+    return [("/".join((basedir, path, final)), file_pattern) for _, path in scan_data]


 def read_options(options):
     try:
-        base, file_pattern = options['data'].split(" ")[:2]
+        base, file_pattern = options["data"].split(" ")[:2]
     except ValueError:
-        base, file_pattern = options['data'], None
+        base, file_pattern = options["data"], None

     # the file pattern can be in the base url
     pattern_regex = r"/([^/]*\([^/]*\)[^/]*)$"
@@ -70,9 +69,7 @@ def read_options(options):
         base = base.replace(file_pattern, "")

     # handle sf.net specially
-    base = base.replace(
-        "http://sf.net/", "http://qa.debian.org/watch/sf.php/"
-    )
+    base = base.replace("http://sf.net/", "http://qa.debian.org/watch/sf.php/")

     return base, file_pattern
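Note: the `pattern_regex` logic touched above splits a trailing file pattern off the base URL; a standalone sketch with an illustrative URL:

    import re

    pattern_regex = r"/([^/]*\([^/]*\)[^/]*)$"
    base = r"http://example.com/dist/foo-(.*)\.tar\.gz"
    match = re.search(pattern_regex, base)
    file_pattern = match.group(1)          # foo-(.*)\.tar\.gz
    base = base.replace(file_pattern, "")  # http://example.com/dist/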
@@ -21,18 +21,17 @@ from euscan.version import parse_version

 def htop_vercmp(a, b):
     def fixver(v):
-        if v in ['0.11', '0.12', '0.13']:
-            v = '0.1.' + v[3:]
+        if v in ["0.11", "0.12", "0.13"]:
+            v = "0.1." + v[3:]
         return v

     return simple_vercmp(fixver(a), fixver(b))

-VERSION_CMP_PACKAGE_QUIRKS = {
-    'sys-process/htop': htop_vercmp
-}

-_v_end = r'(?:(?:-|_)(?:pre|p|beta|b|alpha|a|rc|r)\d*)'
-_v = r'((?:\d+)(?:(?:\.\d+)*)(?:[a-zA-Z]*?)(?:' + _v_end + '*))'
+
+VERSION_CMP_PACKAGE_QUIRKS = {"sys-process/htop": htop_vercmp}
+
+_v_end = r"(?:(?:-|_)(?:pre|p|beta|b|alpha|a|rc|r)\d*)"
+_v = r"((?:\d+)(?:(?:\.\d+)*)(?:[a-zA-Z]*?)(?:" + _v_end + "*))"


 def cast_int_components(version):
@@ -75,15 +74,15 @@ def version_is_nightly(a, b):
     b = parse_version(b)

     # Try to skip nightly builds when not wanted (www-apps/moodle)
-    if len(a) != len(b) and len(b) == 2 and len(b[0]) == len('yyyymmdd'):
-        if b[0][:4] != '0000':
+    if len(a) != len(b) and len(b) == 2 and len(b[0]) == len("yyyymmdd"):
+        if b[0][:4] != "0000":
             return True
     return False


 def version_blacklisted(cp, version):
     rule = None
-    cpv = '%s-%s' % (cp, version)
+    cpv = "%s-%s" % (cp, version)

     # Check that the generated cpv can be used by portage
     if not portage.versions.catpkgsplit(cpv):
@@ -104,10 +103,10 @@ def version_change_end_sep(version):
     if not match:
         return None
     end = match.group(1)
-    if end[0] == '_':
-        newend = end.replace('_', '-')
-    elif end[0] == '-':
-        newend = end.replace('-', '_')
+    if end[0] == "_":
+        newend = end.replace("_", "-")
+    elif end[0] == "-":
+        newend = end.replace("-", "_")
     else:
         return None
     return version.replace(end, newend)
@@ -135,17 +134,17 @@ def generate_templates_vars(version):
         var = []
         for j in range(i):
             ver.append(str(part[j]))
-            var.append('${%d}' % j)
+            var.append("${%d}" % j)

         ret.append((".".join(ver), ".".join(var)))
-    ret.append((version, '${PV}'))
+    ret.append((version, "${PV}"))
     ret.reverse()
     return ret


 def template_from_url(url, version):
-    prefix, chunks = url.split('://')
-    chunks = chunks.split('/')
+    prefix, chunks = url.split("://")
+    chunks = chunks.split("/")

     for i in range(len(chunks)):
         chunk = chunks[i]
@@ -162,9 +161,9 @@ def template_from_url(url, version):
 def url_from_template(url, version):
     components = split_version(version)

-    url = url.replace('${PV}', version)
+    url = url.replace("${PV}", version)
     for i in range(len(components)):
-        url = url.replace('${%d}' % i, str(components[i]))
+        url = url.replace("${%d}" % i, str(components[i]))

     return url

@@ -172,8 +171,8 @@ def url_from_template(url, version):
 # Stolen from distutils.LooseVersion
 # Used for brute force to increment the version
 def split_version(version):
-    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)
-    components = [x for x in component_re.split(version) if x and x != '.']
+    component_re = re.compile(r"(\d+ | [a-z]+ | \.)", re.VERBOSE)
+    components = [x for x in component_re.split(version) if x and x != "."]
     for i in range(len(components)):
         try:
             components[i] = int(components[i])
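Note: `split_version` above behaves like this — a sketch of the splitting step before the int() casting in the loop:

    import re

    component_re = re.compile(r"(\d+ | [a-z]+ | \.)", re.VERBOSE)
    components = [x for x in component_re.split("1.2.3b4") if x and x != "."]
    print(components)  # -> ['1', '2', '3', 'b', '4']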
@@ -229,7 +228,7 @@ def gen_versions(components, level):


 def timeout_for_url(url):
-    if 'sourceforge' in url:
+    if "sourceforge" in url:
         timeout = 15
     else:
         timeout = 5
@@ -246,7 +245,7 @@ rpcache = {}


 def urlallowed(url):
-    if CONFIG['skip-robots-txt']:
+    if CONFIG["skip-robots-txt"]:
         return True

     protocol, domain = urllib.parse.urlparse(url)[:2]
@@ -255,15 +254,15 @@ def urlallowed(url):
         if re.match(bd, domain):
             return True

-    for d in ['sourceforge', 'berlios', 'github.com']:
+    for d in ["sourceforge", "berlios", "github.com"]:
         if d in domain:
             return True

-    if protocol == 'ftp':
+    if protocol == "ftp":
         return True

-    baseurl = '%s://%s' % (protocol, domain)
-    robotsurl = urllib.parse.urljoin(baseurl, 'robots.txt')
+    baseurl = "%s://%s" % (protocol, domain)
+    robotsurl = urllib.parse.urljoin(baseurl, "robots.txt")

     if baseurl in rpcache:
         rp = rpcache[baseurl]
@@ -283,7 +282,7 @@ def urlallowed(url):

         setdefaulttimeout(timeout)

-    return rp.can_fetch(CONFIG['user-agent'], url) if rp else True
+    return rp.can_fetch(CONFIG["user-agent"], url) if rp else True


 def urlopen(url, timeout=None, verb="GET"):
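Note: the `rp.can_fetch(...)` gate above relies on the stdlib robots.txt parser; a minimal sketch of that machinery (URL and user agent illustrative):

    import urllib.robotparser

    rp = urllib.robotparser.RobotFileParser()
    rp.set_url("http://example.com/robots.txt")
    rp.read()
    print(rp.can_fetch("euscan", "http://example.com/distfiles/foo-1.0.tar.gz"))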
@@ -294,23 +293,24 @@ def urlopen(url, timeout=None, verb="GET"):
     if not timeout:
         timeout = timeout_for_url(url)

-    if verb == 'GET':
+    if verb == "GET":
         request = urllib.request.Request(url)
-    elif verb == 'HEAD':
+    elif verb == "HEAD":
         request = HeadRequest(url)
     else:
         return None

-    request.add_header('User-Agent', CONFIG['user-agent'])
+    request.add_header("User-Agent", CONFIG["user-agent"])

     handlers = []

-    if CONFIG['cache']:
+    if CONFIG["cache"]:
         from cache import CacheHandler
-        handlers.append(CacheHandler(CONFIG['cache']))

-    if CONFIG['verbose']:
-        debuglevel = CONFIG['verbose'] - 1
+        handlers.append(CacheHandler(CONFIG["cache"]))
+
+    if CONFIG["verbose"]:
+        debuglevel = CONFIG["verbose"] - 1
         handlers.append(urllib.request.HTTPHandler(debuglevel=debuglevel))

     opener = urllib.request.build_opener(*handlers)
@@ -330,7 +330,7 @@ def tryurl(fileurl, template):
     try:
         basename = os.path.basename(fileurl)

-        fp = urlopen(fileurl, verb='HEAD')
+        fp = urlopen(fileurl, verb="HEAD")
         if not fp:
             euscan.output.eend(errno.EPERM)
             return None
@@ -339,16 +339,17 @@ def tryurl(fileurl, template):

         # Some URLs return Content-disposition with different filename
         # Disable check for now (I have no seen false positives)
-        #if 'Content-disposition' in headers and \
+        # if 'Content-disposition' in headers and \
         #    basename not in headers['Content-disposition']:
         #    result = None
-        if 'Content-Length' in headers and headers['Content-Length'] == '0':
+        if "Content-Length" in headers and headers["Content-Length"] == "0":
             result = None
-        elif 'Content-Type' in headers and \
-            'text/html' in headers['Content-Type']:
+        elif "Content-Type" in headers and "text/html" in headers["Content-Type"]:
             result = None
-        elif 'Content-Type' in headers and \
-            'application/x-httpd-php' in headers['Content-Type']:
+        elif (
+            "Content-Type" in headers
+            and "application/x-httpd-php" in headers["Content-Type"]
+        ):
             result = None
         elif fp.geturl() != fileurl:
             regex = regex_from_template(template)
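Note: `HeadRequest`, used in the urlopen hunk above, is not shown in this diff; the classic urllib idiom that name suggests looks like this (a sketch — the project's own class may differ):

    import urllib.request

    class HeadRequest(urllib.request.Request):
        def get_method(self):
            # Issue HEAD so only headers are fetched, not the body.
            return "HEAD"

    # usage: urllib.request.urlopen(HeadRequest("http://example.com/file.tar.gz"))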
@@ -356,8 +357,9 @@ def tryurl(fileurl, template):
             basename2 = os.path.basename(fp.geturl())

             # Redirect to another (earlier?) version
-            if basename != basename2 and (re.match(regex, fp.geturl()) or \
-                re.match(baseregex, basename2)):
+            if basename != basename2 and (
+                re.match(regex, fp.geturl()) or re.match(baseregex, basename2)
+            ):
                 result = None

         if result:
@@ -378,33 +380,33 @@ def regex_from_template(template):
     regexp = re.escape(template)

     # Unescape specific stuff
-    regexp = regexp.replace('\$\{', '${')
-    regexp = regexp.replace('\}', '}')
-    regexp = regexp.replace('}\.$', '}.$')
+    regexp = regexp.replace("\$\{", "${")
+    regexp = regexp.replace("\}", "}")
+    regexp = regexp.replace("}\.$", "}.$")

     # Replace ${\d+}
-    #regexp = regexp.replace('${0}', r'([\d]+?)')
-    regexp = re.sub(r'(\$\{\d+\}(\.?))+', r'([\\w\.]+?)', regexp)
+    # regexp = regexp.replace('${0}', r'([\d]+?)')
+    regexp = re.sub(r"(\$\{\d+\}(\.?))+", r"([\\w\.]+?)", regexp)

-    #regexp = re.sub(r'(\$\{\d+\}\.?)+', r'([\w]+?)', regexp)
-    #regexp = re.sub(r'(\$\{\d+\}\.+)+', '(.+?)\.', regexp)
-    #regexp = re.sub(r'(\$\{\d+\})+', '(.+?)', regexp)
+    # regexp = re.sub(r'(\$\{\d+\}\.?)+', r'([\w]+?)', regexp)
+    # regexp = re.sub(r'(\$\{\d+\}\.+)+', '(.+?)\.', regexp)
+    # regexp = re.sub(r'(\$\{\d+\})+', '(.+?)', regexp)

     # Full version
-    regexp = regexp.replace('${PV}', _v)
+    regexp = regexp.replace("${PV}", _v)

     # End
-    regexp = regexp + r'/?$'
+    regexp = regexp + r"/?$"

     return regexp


 def basedir_from_template(template):
-    idx = template.find('${')
+    idx = template.find("${")
     if idx == -1:
         return template

-    idx = template[0:idx].rfind('/')
+    idx = template[0:idx].rfind("/")
     if idx == -1:
         return ""

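Note: a trimmed-down sketch of the template-to-regexp step reformatted above; the `_v` here is simplified (the real one also accepts alpha/beta/rc suffixes):

    import re

    _v = r"((?:\d+)(?:(?:\.\d+)*))"  # simplified version matcher
    template = "foo-${PV}.tar.gz"
    regexp = re.escape(template).replace(re.escape("${PV}"), _v) + r"/?$"
    print(bool(re.match(regexp, "foo-1.2.3.tar.gz")))  # -> True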
@@ -412,15 +414,15 @@ def basedir_from_template(template):


 def generate_scan_paths(url):
-    prefix, chunks = url.split('://')
-    chunks = chunks.split('/')
+    prefix, chunks = url.split("://")
+    chunks = chunks.split("/")

     steps = []

     path = prefix + ":/"
     for chunk in chunks:
-        if '${' in chunk:
-            steps.append((path, '^(?:|.*/)' + regex_from_template(chunk)))
+        if "${" in chunk:
+            steps.append((path, "^(?:|.*/)" + regex_from_template(chunk)))
             path = ""
         else:
             path += "/"
@@ -444,7 +446,7 @@ def parse_mirror(uri):
         return None

     mirrorname = uri[9:eidx]
-    path = uri[eidx + 1:]
+    path = uri[eidx + 1 :]

     if mirrorname in mirrors:
         mirrors = mirrors[mirrorname]
@@ -31,11 +31,11 @@ def apply_mangling_rules(kind, rules, string):
         ret = None

         # First try handlers rules
-        if rule == 'gentoo' and kind == 'versionmangle':
+        if rule == "gentoo" and kind == "versionmangle":
             ret = gentoo_mangle_version(string)
-        elif kind == 'downloadurlmangle':
+        elif kind == "downloadurlmangle":
             ret = euscan.handlers.mangle_url(rule, string)
-        elif kind == 'versionmangle':
+        elif kind == "versionmangle":
             ret = euscan.handlers.mangle_version(rule, string)

         if ret is not None:  # Use return value as new string if not None
@@ -48,13 +48,13 @@ def apply_mangling_rules(kind, rules, string):

 def mangle_version(up_pv, options):
     # Default rule is gentoo when empty
-    if 'versionmangle' not in options or not options['versionmangle']:
-        options['versionmangle'] = ['gentoo']
-    return apply_mangling_rules('versionmangle', options, up_pv)
+    if "versionmangle" not in options or not options["versionmangle"]:
+        options["versionmangle"] = ["gentoo"]
+    return apply_mangling_rules("versionmangle", options, up_pv)


 def mangle_url(url, options):
-    return apply_mangling_rules('downloadurlmangle', options, url)
+    return apply_mangling_rules("downloadurlmangle", options, url)


 # Stolen from g-pypi
@@ -107,30 +107,28 @@ def gentoo_mangle_version(up_pv):
     number of match.groups every time to simplify the code

     """
-    bad_suffixes = re.compile(
-        r'((?:[._-]*)(?:dev|devel|final|stable|snapshot)$)', re.I)
-    revision_suffixes = re.compile(
-        r'(.*?)([\._-]*(?:r|patch|p)[\._-]*)([0-9]*)$', re.I)
+    bad_suffixes = re.compile(r"((?:[._-]*)(?:dev|devel|final|stable|snapshot)$)", re.I)
+    revision_suffixes = re.compile(r"(.*?)([\._-]*(?:r|patch|p)[\._-]*)([0-9]*)$", re.I)
     suf_matches = {
-        '_pre': [
-            r'(.*?)([\._-]*dev[\._-]*r?)([0-9]+)$',
-            r'(.*?)([\._-]*(?:pre|preview)[\._-]*)([0-9]*)$',
+        "_pre": [
+            r"(.*?)([\._-]*dev[\._-]*r?)([0-9]+)$",
+            r"(.*?)([\._-]*(?:pre|preview)[\._-]*)([0-9]*)$",
         ],
-        '_alpha': [
-            r'(.*?)([\._-]*(?:alpha|test)[\._-]*)([0-9]*)$',
-            r'(.*?)([\._-]*a[\._-]*)([0-9]*)$',
-            r'(.*[^a-z])(a)([0-9]*)$',
+        "_alpha": [
+            r"(.*?)([\._-]*(?:alpha|test)[\._-]*)([0-9]*)$",
+            r"(.*?)([\._-]*a[\._-]*)([0-9]*)$",
+            r"(.*[^a-z])(a)([0-9]*)$",
         ],
-        '_beta': [
-            r'(.*?)([\._-]*beta[\._-]*)([0-9]*)$',
-            r'(.*?)([\._-]*b)([0-9]*)$',
-            r'(.*[^a-z])(b)([0-9]*)$',
+        "_beta": [
+            r"(.*?)([\._-]*beta[\._-]*)([0-9]*)$",
+            r"(.*?)([\._-]*b)([0-9]*)$",
+            r"(.*[^a-z])(b)([0-9]*)$",
         ],
-        '_rc': [
-            r'(.*?)([\._-]*rc[\._-]*)([0-9]*)$',
-            r'(.*?)([\._-]*c[\._-]*)([0-9]*)$',
-            r'(.*[^a-z])(c[\._-]*)([0-9]+)$',
+        "_rc": [
+            r"(.*?)([\._-]*rc[\._-]*)([0-9]*)$",
+            r"(.*?)([\._-]*c[\._-]*)([0-9]*)$",
+            r"(.*[^a-z])(c[\._-]*)([0-9]+)$",
         ],
     }
     rs_match = None
     pv = up_pv
@@ -139,9 +137,9 @@ def gentoo_mangle_version(up_pv):
     rev_match = revision_suffixes.search(up_pv)
     if rev_match:
         pv = up_pv = rev_match.group(1)
-        replace_me = rev_match.group(2)
+        # replace_me = rev_match.group(2)
         rev = rev_match.group(3)
-        additional_version = '_p' + rev
+        additional_version = "_p" + rev

     for this_suf in list(suf_matches.keys()):
         if rs_match:
@@ -156,7 +154,7 @@ def gentoo_mangle_version(up_pv):
     if rs_match:
         # e.g. 1.0.dev-r1234
         major_ver = rs_match.group(1)  # 1.0
-        replace_me = rs_match.group(2)  # .dev-r
+        # replace_me = rs_match.group(2)  # .dev-r
         rev = rs_match.group(3)  # 1234
         pv = major_ver + portage_suffix + rev
     else:
@@ -164,7 +162,7 @@ def gentoo_mangle_version(up_pv):
     match = bad_suffixes.search(up_pv)
     if match:
         suffix = match.groups()[0]
-        pv = up_pv[: - (len(suffix))]
+        pv = up_pv[: -(len(suffix))]

     pv = pv + additional_version

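Note: worked examples of the mangling above (expected behaviour derived from the rules shown, not output captured from this commit):

    "1.0b2"      -> "1.0_beta2"   (matches a "_beta" rule)
    "2.3-rc1"    -> "2.3_rc1"     (matches a "_rc" rule)
    "1.2-patch3" -> "1.2_p3"      (revision_suffixes path)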
@@ -51,10 +51,12 @@ def progress_bar():

     def display():
         progress_bar.set(progress_handler.curval, progress_handler.maxval)
+
     progress_handler.display = display

     def sigwinch_handler(signum, frame):
         lines, progress_bar.term_columns = portage.output.get_term_size()
+
     signal.signal(signal.SIGWINCH, sigwinch_handler)

     yield on_progress
@@ -75,29 +77,29 @@ def clean_colors(string):


 def transform_url(config, cpv, url):
-    if config['mirror']:
+    if config["mirror"]:
         url = to_mirror(url)
-    if config['ebuild-uri']:
+    if config["ebuild-uri"]:
         url = to_ebuild_uri(cpv, url)
     return url


 def to_ebuild_uri(cpv, url):
     cat, pkg, ver, rev = portage.catpkgsplit(cpv)
-    p = '%s-%s' % (pkg, ver)
-    pvr = '%s%s' % (ver, '-%s' % rev if rev != 'r0' else '')
-    pf = '%s-%s' % (pkg, pvr)
+    p = "%s-%s" % (pkg, ver)
+    pvr = "%s%s" % (ver, "-%s" % rev if rev != "r0" else "")
+    pf = "%s-%s" % (pkg, pvr)
     evars = (
-        (p, 'P'),
-        (pkg, 'PN'),
-        (ver, 'PV'),
-        (rev, 'PR'),
-        (pvr, 'PVR'),
-        (pf, 'PF'),
-        (cat, 'CATEGORY')
+        (p, "P"),
+        (pkg, "PN"),
+        (ver, "PV"),
+        (rev, "PR"),
+        (pvr, "PVR"),
+        (pf, "PF"),
+        (cat, "CATEGORY"),
     )
     for src, dst in evars:
-        url = url.replace(src, '${%s}' % dst)
+        url = url.replace(src, "${%s}" % dst)
     return url

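Note: for the substitution above, given an illustrative cpv "app-misc/foo-1.2-r1", the variables and the result would be:

    P = foo-1.2   PN = foo   PV = 1.2   PVR = 1.2-r1   PF = foo-1.2-r1
    "http://example.com/foo-1.2.tar.gz" -> "http://example.com/${P}.tar.gz"

P is substituted first, so the longer match wins over PN or PV alone.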
@@ -112,14 +114,14 @@ def load_mirrors():


 def from_mirror(url):
-    if not url.startswith('mirror://'):
+    if not url.startswith("mirror://"):
         return url

     if not mirrors_:
         load_mirrors()

     for mirror_name in mirrors_:
-        prefix = 'mirror://' + mirror_name
+        prefix = "mirror://" + mirror_name
         if url.startswith(prefix):
             return url.replace(prefix, mirrors_[mirror_name][0])

@@ -137,7 +139,7 @@ def to_mirror(url):
             return "mirror://%s%s%s" % (
                 mirror_name,
                 "" if url_part.startswith("/") else "/",
-                url_part
+                url_part,
             )
     return url

@@ -146,6 +148,7 @@ class EOutputMem(EOutput):
     """
     Override of EOutput, allows to specify an output file for writes
     """
+
     def __init__(self, *args, **kwargs):
         super(EOutputMem, self).__init__(*args, **kwargs)
         self.out = StringIO()
@@ -161,6 +164,7 @@ class EuscanOutput(object):
     """
     Class that handles output for euscan
     """
+
     def __init__(self, config):
         self.config = config
         self.queries = defaultdict(dict)
@@ -212,12 +216,10 @@ class EuscanOutput(object):
     def result(self, cp, version, urls, handler, confidence):
         from euscan.version import get_version_type

-        cpv = '%s-%s' % (cp, version)
-        urls = ' '.join(
-            transform_url(self.config, cpv, url) for url in urls.split()
-        )
+        cpv = "%s-%s" % (cp, version)
+        urls = " ".join(transform_url(self.config, cpv, url) for url in urls.split())

-        if self.config['format'] in ['json', 'dict']:
+        if self.config["format"] in ["json", "dict"]:
             _curr = self.queries[self.current_query]
             _curr["result"].append(
                 {
@@ -225,12 +227,12 @@ class EuscanOutput(object):
                     "urls": urls.split(),
                     "handler": handler,
                     "confidence": confidence,
-                    "type": get_version_type(version)
+                    "type": get_version_type(version),
                 }
             )
         else:
-            if not self.config['quiet']:
-                print("Upstream Version:", pp.number("%s" % version), end=' ')
+            if not self.config["quiet"]:
+                print("Upstream Version:", pp.number("%s" % version), end=" ")
                 print(pp.path(" %s" % urls))
             else:
                 print(pp.cpv("%s-%s" % (cp, version)) + ":", pp.path(urls))
@@ -1,5 +1,3 @@
-
-
 import os
 import sys
 from datetime import datetime
@@ -22,7 +20,6 @@ def filter_versions(cp, versions):
     filtered = {}

     for url, version, handler, confidence in versions:
-
         # Try to keep the most specific urls (determinted by the length)
         if version in filtered and len(url) < len(filtered[version]):
             continue
@@ -31,15 +28,16 @@ def filter_versions(cp, versions):
         if version_blacklisted(cp, version):
             continue

-        filtered[version] = {
-            "url": url,
-            "handler": handler,
-            "confidence": confidence
-        }
+        filtered[version] = {"url": url, "handler": handler, "confidence": confidence}

     return [
-        (cp, filtered[version]["url"], version, filtered[version]["handler"],
-         filtered[version]["confidence"])
+        (
+            cp,
+            filtered[version]["url"],
+            version,
+            filtered[version]["handler"],
+            filtered[version]["confidence"],
+        )
         for version in filtered
     ]

@@ -52,7 +50,7 @@ def parse_src_uri(uris):
     while uris:
         uri = uris.pop()

-        if '://' not in uri:
+        if "://" not in uri:
             continue

         if uris and uris[-1] == "->":
@@ -73,16 +71,16 @@ def reload_gentoolkit():
     import gentoolkit

     # Not used in recent versions
-    if not hasattr(gentoolkit.package, 'PORTDB'):
+    if not hasattr(gentoolkit.package, "PORTDB"):
         return

     PORTDB = portage.db[portage.root]["porttree"].dbapi

-    if hasattr(gentoolkit.dbapi, 'PORTDB'):
+    if hasattr(gentoolkit.dbapi, "PORTDB"):
         gentoolkit.dbapi.PORTDB = PORTDB
-    if hasattr(gentoolkit.package, 'PORTDB'):
+    if hasattr(gentoolkit.package, "PORTDB"):
         gentoolkit.package.PORTDB = PORTDB
-    if hasattr(gentoolkit.query, 'PORTDB'):
+    if hasattr(gentoolkit.query, "PORTDB"):
         gentoolkit.query.PORTDB = PORTDB


@@ -104,21 +102,18 @@ def scan_upstream(query, on_progress=None):
     )

     if not matches:
-        output.ewarn(
-            pp.warn("No package matching '%s'" % pp.pkgquery(query))
-        )
+        output.ewarn(pp.warn("No package matching '%s'" % pp.pkgquery(query)))
         return None

     matches = sorted(matches)
     pkg = matches.pop()

-    while '9999' in pkg.version and len(matches):
+    while "9999" in pkg.version and len(matches):
         pkg = matches.pop()

     if not pkg:
         output.ewarn(
-            pp.warn("Package '%s' only have a dev version (9999)"
-                    % pp.pkgquery(pkg.cp))
+            pp.warn("Package '%s' only have a dev version (9999)" % pp.pkgquery(pkg.cp))
         )
         return None

@@ -132,42 +127,34 @@ def scan_upstream(query, on_progress=None):
         on_progress(increment=10)

     if pkg.cp in BLACKLIST_PACKAGES:
-        output.ewarn(
-            pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp))
-        )
+        output.ewarn(pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp)))
         return None

-    if not CONFIG['quiet']:
-        if not CONFIG['format']:
-            pp.uprint(
-                " * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name()))
-            )
+    if not CONFIG["quiet"]:
+        if not CONFIG["format"]:
+            pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())))
             pp.uprint()
         else:
             output.metadata("overlay", pp.section(pkg.repo_name()))

         ebuild_path = pkg.ebuild_path()
         if ebuild_path:
-            output.metadata(
-                "ebuild", pp.path(os.path.normpath(ebuild_path))
-            )
+            output.metadata("ebuild", pp.path(os.path.normpath(ebuild_path)))

         uris, homepage, description = pkg.environment(
-            ('SRC_URI', 'HOMEPAGE', 'DESCRIPTION')
+            ("SRC_URI", "HOMEPAGE", "DESCRIPTION")
         )

         output.metadata("repository", pkg.repo_name())
         output.metadata("homepage", homepage)
         output.metadata("description", description)
     else:
-        uris = pkg.environment('SRC_URI')
+        uris = pkg.environment("SRC_URI")

     cpv = pkg.cpv

     uris = parse_src_uri(uris)
-    uris_expanded = [
-        from_mirror(uri) if 'mirror://' in uri else uri for uri in uris
-    ]
+    uris_expanded = [from_mirror(uri) if "mirror://" in uri else uri for uri in uris]

     pkg._uris = uris
     pkg._uris_expanded = uris_expanded
@@ -187,17 +174,16 @@ def scan_upstream(query, on_progress=None):

     is_current_version_stable = is_version_stable(ver)
     if len(result) > 0:
-        if not (CONFIG['format'] or CONFIG['quiet']):
+        if not (CONFIG["format"] or CONFIG["quiet"]):
             print("")
         for cp, url, version, handler, confidence in result:
             if CONFIG["ignore-pre-release"]:
                 if not is_version_stable(version):
                     continue
             if CONFIG["ignore-pre-release-if-stable"]:
-                if is_current_version_stable and \
-                   not is_version_stable(version):
+                if is_current_version_stable and not is_version_stable(version):
                     continue
-            if CONFIG['progress']:
+            if CONFIG["progress"]:
                 print("", file=sys.stderr)
             output.result(cp, version, url, handler, confidence)

@@ -25,24 +25,24 @@ def get_version_type(version):
         return types[0]  # TODO: consider returning all types
     return "release"


 # Stolen from pkg_resources, but importing it is not a good idea

-component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
-replace = \
-    {'pre': 'c', 'preview': 'c', '-': 'final-', 'rc': 'c', 'dev': '@'}.get
+component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
+replace = {"pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@"}.get


 def _parse_version_parts(s):
     for part in component_re.split(s):
         part = replace(part, part)
-        if not part or part == '.':
+        if not part or part == ".":
             continue
-        if part[:1] in '0123456789':
+        if part[:1] in "0123456789":
             yield part.zfill(8)  # pad for numeric comparison
         else:
-            yield '*' + part
+            yield "*" + part

-    yield '*final'  # ensure that alpha/beta/candidate are before final
+    yield "*final"  # ensure that alpha/beta/candidate are before final


 def parse_version(s):
@@ -78,12 +78,12 @@ def parse_version(s):
     """
     parts = []
     for part in _parse_version_parts(s.lower()):
-        if part.startswith('*'):
-            if part < '*final':  # remove '-' before a prerelease tag
-                while parts and parts[-1] == '*final-':
+        if part.startswith("*"):
+            if part < "*final":  # remove '-' before a prerelease tag
+                while parts and parts[-1] == "*final-":
                     parts.pop()
             # remove trailing zeros from each series of numeric parts
-            while parts and parts[-1] == '00000000':
+            while parts and parts[-1] == "00000000":
                 parts.pop()
         parts.append(part)
     return tuple(parts)
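Note: examples of the tuples produced above (a sketch of expected values, worked from the rules shown):

    parse_version("1.0")    -> ('00000001', '*final')
    parse_version("1.0rc1") -> ('00000001', '*c', '00000001', '*final')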