import sys
from io import StringIO
from collections import defaultdict
import json
import signal
import time
import re

import portage
from portage.output import EOutput, TermProgressBar
from gentoolkit import pprinter as pp

from euscan.helpers import dict_to_xml

mirrors_ = None


class ProgressHandler(object):
    def __init__(self, progress_bar):
        self.curval = 0
        self.maxval = 0
        self.last_update = 0
        self.min_display_latency = 0.2
        self.progress_bar = progress_bar

    def on_progress(self, maxval=None, increment=1, label=None):
        self.maxval = maxval or self.maxval
        self.curval += increment

        if label:
            self.progress_bar.label(label)

        cur_time = time.time()
        if cur_time - self.last_update >= self.min_display_latency:
            self.last_update = cur_time
            self.display()

    def display(self):
        raise NotImplementedError(self)


def progress_bar():
    on_progress = None
    try:
        progress_bar = TermProgressBar(fd=sys.stderr, title="euscan")
    except TypeError:
        progress_bar = TermProgressBar(title="euscan")

    progress_handler = ProgressHandler(progress_bar)
    on_progress = progress_handler.on_progress

    def display():
        progress_bar.set(progress_handler.curval, progress_handler.maxval)
    progress_handler.display = display

    def sigwinch_handler(signum, frame):
        lines, progress_bar.term_columns = portage.output.get_term_size()
    signal.signal(signal.SIGWINCH, sigwinch_handler)

    yield on_progress

    # make sure the final progress is displayed
    progress_handler.display()
    signal.signal(signal.SIGWINCH, signal.SIG_DFL)

    yield None
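
# Minimal usage sketch (not part of the original module): progress_bar() is a
# generator that is advanced twice. The first next() yields the on_progress
# callback, the second next() draws the final state and restores SIGWINCH.
#
#     progress = progress_bar()
#     on_progress = next(progress)   # get the progress callback
#     on_progress(maxval=100, increment=10, label="scanning")
#     next(progress)                 # finalize the display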


def clean_colors(string):
    if type(string) is str:
        string = re.sub("\033\\[[0-9;]+m", "", string)
        string = re.sub(r"\\u001b\[[0-9;]+m", "", string)
        string = re.sub(r"\x1b\[[0-9;]+m", "", string)
    return string
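
# Illustrative example (assumption, not from the original source):
#     clean_colors("\x1b[32mfoo-1.0\x1b[0m")  ->  "foo-1.0"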


def transform_url(config, cpv, url):
    if config['mirror']:
        url = to_mirror(url)
    if config['ebuild-uri']:
        url = to_ebuild_uri(cpv, url)
    return url


def to_ebuild_uri(cpv, url):
    cat, pkg, ver, rev = portage.catpkgsplit(cpv)
    p = '%s-%s' % (pkg, ver)
    pvr = '%s%s' % (ver, '-%s' % rev if rev != 'r0' else '')
    pf = '%s-%s' % (pkg, pvr)
    evars = (
        (p, 'P'),
        (pkg, 'PN'),
        (ver, 'PV'),
        (rev, 'PR'),
        (pvr, 'PVR'),
        (pf, 'PF'),
        (cat, 'CATEGORY')
    )
    for src, dst in evars:
        url = url.replace(src, '${%s}' % dst)
    return url
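
# Hypothetical example (not from the original source), assuming the cpv
# "app-misc/foo-1.2.3":
#     to_ebuild_uri("app-misc/foo-1.2.3",
#                   "http://example.com/foo-1.2.3.tar.gz")
#     ->  "http://example.com/${P}.tar.gz"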


def load_mirrors():
    import random

    global mirrors_
    if mirrors_ is None:
        mirrors_ = portage.settings.thirdpartymirrors()
    for mirror_name in mirrors_:
        random.shuffle(mirrors_[mirror_name])


def from_mirror(url):
    if not url.startswith('mirror://'):
        return url

    if not mirrors_:
        load_mirrors()

    for mirror_name in mirrors_:
        prefix = 'mirror://' + mirror_name
        if url.startswith(prefix):
            return url.replace(prefix, mirrors_[mirror_name][0])

    return url


def to_mirror(url):
    if not mirrors_:
        load_mirrors()

    for mirror_name in mirrors_:
        for mirror_url in mirrors_[mirror_name]:
            if url.startswith(mirror_url):
                url_part = url.split(mirror_url)[1]
                return "mirror://%s%s%s" % (
                    mirror_name,
                    "" if url_part.startswith("/") else "/",
                    url_part
                )
    return url
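
# Rough sketch of the mirror round-trip (assumes a "gnu" entry exists in the
# Portage thirdpartymirrors configuration; actual mirror hosts vary per system):
#     to_mirror("http://ftp.gnu.org/gnu/bash/bash-5.0.tar.gz")
#     ->  "mirror://gnu/bash/bash-5.0.tar.gz"
#     from_mirror("mirror://gnu/bash/bash-5.0.tar.gz")
#     ->  the same path on one of the (shuffled) configured gnu mirrors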


class EOutputMem(EOutput):
    """
    Override of EOutput that redirects all writes to an in-memory buffer
    instead of a file.
    """
    def __init__(self, *args, **kwargs):
        super(EOutputMem, self).__init__(*args, **kwargs)
        self.out = StringIO()

    def getvalue(self):
        return self.out.getvalue()

    def _write(self, f, msg):
        super(EOutputMem, self)._write(self.out, msg)
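
# Minimal sketch (assumption, not from the original module): EOutputMem is
# used when a machine-readable output format is requested, so einfo/ewarn
# messages are buffered instead of printed and can be collected later:
#     out = EOutputMem()
#     out.einfo("Scanning upstream...")
#     messages = out.getvalue()   # buffered text, nothing hit the terminal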


class EuscanOutput(object):
    """
    Class that handles output for euscan
    """
    def __init__(self, config):
        self.config = config
        self.queries = defaultdict(dict)
        self.current_query = None

    def clean(self):
        self.queries = defaultdict(dict)
        self.current_query = None

    def set_query(self, query):
        self.current_query = query
        if query is None:
            return

        if query in self.queries:
            return

        if self.config["format"]:
            output = EOutputMem()
        else:
            output = EOutput()

        self.queries[query] = {
            "output": output,
            "result": [],
            "metadata": {},
        }

    def get_formatted_output(self, format_=None):
        data = {}

        for query in self.queries:
            data[query] = {
                "result": self.queries[query]["result"],
                "metadata": self.queries[query]["metadata"],
                "messages": self.queries[query]["output"].getvalue(),
            }

        format_ = format_ or self.config["format"]
        if format_.lower() == "json":
            return json.dumps(data, indent=self.config["indent"])
        elif format_.lower() == "xml":
            return dict_to_xml(data, indent=self.config["indent"])
        elif format_.lower() == "dict":
            return data
        else:
            raise TypeError("Invalid output format")

    def result(self, cp, version, urls, handler, confidence):
        from euscan.version import get_version_type

        cpv = '%s-%s' % (cp, version)
        urls = ' '.join(
            transform_url(self.config, cpv, url) for url in urls.split()
        )

        if self.config['format'] in ['json', 'dict']:
            _curr = self.queries[self.current_query]
            _curr["result"].append(
                {
                    "version": version,
                    "urls": urls.split(),
                    "handler": handler,
                    "confidence": confidence,
                    "type": get_version_type(version)
                }
            )
        else:
            if not self.config['quiet']:
                print("Upstream Version:", pp.number("%s" % version), end=' ')
                print(pp.path(" %s" % urls))
            else:
                print(pp.cpv("%s-%s" % (cp, version)) + ":", pp.path(urls))

    def metadata(self, key, value, show=True):
        if self.config["format"]:
            self.queries[self.current_query]["metadata"][key] = value
        elif show:
            print("%s: %s" % (key.capitalize(), value))

    def __getattr__(self, key):
        if not self.config["quiet"] and self.current_query is not None:
            output = self.queries[self.current_query]["output"]
            return getattr(output, key)
        else:
            return lambda *x: None
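
# Rough end-to-end sketch of how this module is typically driven (hypothetical
# config values; the real config dict comes from euscan's command line):
#     config = {"format": "json", "indent": 2, "quiet": False,
#               "mirror": False, "ebuild-uri": False}
#     out = EuscanOutput(config)
#     out.set_query("app-misc/foo")
#     out.result("app-misc/foo", "1.2.3",
#                "http://example.com/foo-1.2.3.tar.gz", "generic", 100)
#     out.metadata("homepage", "http://example.com")
#     print(out.get_formatted_output())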