euscan: better --quiet mode
Signed-off-by: Corentin Chary <corentincj@iksaif.net>
commit 3569f1bb97 (parent e71e540d0d)

@@ -2,6 +2,14 @@
 Change history
 ================
 
+.. _version-0.2.0:
+
+0.2.0
+=====
+:release-date: ???
+
+* Better --quiet mode
+
 .. _version-0.1.0:
 
 0.1.0

bin/euscan (42 lines changed)

@@ -29,7 +29,8 @@ from gentoolkit import pprinter as pp
 from gentoolkit.eclean.search import (port_settings)
 from gentoolkit.errors import GentoolkitException
 
-from euscan import CONFIG, output
+import euscan
+from euscan import CONFIG
 from euscan.scan import scan_upstream
 
 """ Globals """

@@ -198,7 +199,7 @@ def main():
         sys.exit(errno.EINVAL)
 
     """ Change euscan's output """
-    output = EOutput(CONFIG['quiet'])
+    euscan.output = EOutput(CONFIG['quiet'])
    if CONFIG['verbose'] > 2:
         httplib.HTTPConnection.debuglevel = 1
 
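
Note: the "import euscan" / "euscan.output" spelling used throughout this commit (instead of "from euscan import output") is what lets the reassignment above take effect in every module. A from-import binds the object once at import time, while attribute access through the module sees the later rebinding. A minimal, self-contained sketch of that behaviour, using a throwaway in-memory module (a hypothetical "pkg", not euscan itself):

    import sys
    import types

    # Build a throwaway module exposing an "output" attribute.
    pkg = types.ModuleType("pkg")
    pkg.output = "default-output"
    sys.modules["pkg"] = pkg

    from pkg import output as bound_early   # snapshot of the object taken now

    # Rebind the attribute, as bin/euscan does with euscan.output = EOutput(CONFIG['quiet'])
    pkg.output = "quiet-aware-output"

    print(bound_early)   # still "default-output" -- the from-import kept the old object
    print(pkg.output)    # "quiet-aware-output"  -- what euscan.output.einfo(...) now reaches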

@@ -208,29 +209,34 @@ def main():
     try:
         ret = scan_upstream(package)
     except AmbiguousPackageName as e:
         pkgs = e.args[0]
         for candidate in pkgs:
             print(candidate)
 
         from os.path import basename  # To get the short name
 
         print(file=sys.stderr)
         print(pp.error("The short ebuild name '%s' is ambiguous. Please specify" % basename(pkgs[0])),
               file=sys.stderr, end="")
         pp.die(1, "one of the above fully-qualified ebuild names instead.")
     except GentoolkitException as err:
         pp.die(1, str(err))
     except Exception as err:
         pp.die(1, str(err))
 
-    print ()
+    if not CONFIG['quiet']:
+        print ()
 
-    for url, version in ret:
-        print ("Upstream Version: "
-               + pp.number("%s" % version)
-               + pp.path(" %s" % url))
+    for cp, url, version in ret:
+        if not CONFIG['quiet']:
+            print ("Upstream Version: "
+                   + pp.number("%s" % version)
+                   + pp.path(" %s" % url))
+        else:
+            print (pp.cpv("%s-%s" % (cp, version))
+                   + ": " + pp.path(url))
 
-    if not len(ret):
+    if not len(ret) and not CONFIG['quiet']:
         print (pp.warn("Didn't find any new version, "
                        + "check package's homepage for "
                        + "more informations"));
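
Note: with the hunk above, quiet mode emits one compact line per result instead of the verbose "Upstream Version: ..." block. A standalone sketch of just that formatting choice, using plain print() in place of gentoolkit's pprinter; the sample tuple below is invented for illustration:

    # ret mirrors the (cp, url, version) tuples now returned by scan_upstream().
    ret = [("app-misc/foo", "http://example.com/foo-1.2.tar.gz", "1.2")]
    quiet = True

    for cp, url, version in ret:
        if not quiet:
            print("Upstream Version: %s %s" % (version, url))
        else:
            # one line per hit, e.g. "app-misc/foo-1.2: http://example.com/foo-1.2.tar.gz"
            print("%s-%s: %s" % (cp, version, url))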

@@ -6,8 +6,9 @@ from BeautifulSoup import BeautifulSoup
 
 import portage
 
-from euscan import CONFIG, SCANDIR_BLACKLIST_URLS, BRUTEFORCE_BLACKLIST_PACKAGES, BRUTEFORCE_BLACKLIST_URLS, output
+from euscan import CONFIG, SCANDIR_BLACKLIST_URLS, BRUTEFORCE_BLACKLIST_PACKAGES, BRUTEFORCE_BLACKLIST_URLS
 from euscan import helpers
+import euscan
 
 def scan_html(data, url, pattern):
     soup = BeautifulSoup(data)

@@ -48,7 +49,7 @@ def scan_directory_recursive(cpv, url, steps):
 
     steps = steps[1:]
 
-    output.einfo("Scanning: %s" % url)
+    euscan.output.einfo("Scanning: %s" % url)
 
     try:
         fp = helpers.urlopen(url)

@@ -91,7 +92,7 @@ def scan_directory_recursive(cpv, url, steps):
 def scan(cpv, url):
     for bu in SCANDIR_BLACKLIST_URLS:
         if re.match(bu, url):
-            output.einfo("%s is blacklisted by rule %s" % (url, bu))
+            euscan.output.einfo("%s is blacklisted by rule %s" % (url, bu))
             return []
 
     resolved_url = helpers.parse_mirror(url)

@@ -100,11 +101,11 @@ def scan(cpv, url):
 
     template = helpers.template_from_url(resolved_url, ver)
     if '${' not in template:
-        output.einfo("Url doesn't seems to depend on version: %s not found in %s"
+        euscan.output.einfo("Url doesn't seems to depend on version: %s not found in %s"
                      % (ver, resolved_url))
         return []
     else:
-        output.einfo("Scanning: %s" % template)
+        euscan.output.einfo("Scanning: %s" % template)
 
     steps = helpers.generate_scan_paths(template)
     return scan_directory_recursive(cpv, "", steps)

@@ -116,15 +117,15 @@ def brute_force(cpv, url):
 
     for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
         if re.match(bp, cp):
-            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
+            euscan.output.einfo("%s is blacklisted by rule %s" % (cp, bp))
             return []
 
     for bp in BRUTEFORCE_BLACKLIST_URLS:
         if re.match(bp, url):
-            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
+            euscan.output.einfo("%s is blacklisted by rule %s" % (cp, bp))
             return []
 
-    output.einfo("Generating version from " + ver)
+    euscan.output.einfo("Generating version from " + ver)
 
     components = helpers.split_version(ver)
     versions = helpers.gen_versions(components, CONFIG["brute-force"])

@@ -135,17 +136,17 @@ def brute_force(cpv, url):
         versions.remove(v)
 
     if not versions:
-        output.einfo("Can't generate new versions from " + ver)
+        euscan.output.einfo("Can't generate new versions from " + ver)
         return []
 
     template = helpers.template_from_url(url, ver)
 
     if '${PV}' not in template:
-        output.einfo("Url doesn't seems to depend on full version: %s not found in %s"
+        euscan.output.einfo("Url doesn't seems to depend on full version: %s not found in %s"
                      % (ver, url))
         return []
     else:
-        output.einfo("Brute forcing: %s" % template)
+        euscan.output.einfo("Brute forcing: %s" % template)
 
     result = []
 

@@ -173,7 +174,7 @@ def brute_force(cpv, url):
         result.append([url, version])
 
         if len(result) > CONFIG['brute-force-false-watermark']:
-            output.einfo("Broken server detected ! Skipping brute force.")
+            euscan.output.einfo("Broken server detected ! Skipping brute force.")
             return []
 
         if CONFIG["brute-force-recursive"]:

@@ -3,7 +3,8 @@ import portage
 import urllib2
 import xml.dom.minidom
 
-from euscan import helpers, output
+from euscan import helpers
+import euscan
 
 def can_handle(cpv, url):
     if url.startswith('http://pear.php.net/get/'):

@@ -29,7 +30,7 @@ def scan(cpv, url):
     orig_url = url
     url = 'http://%s/rest/r/%s/allreleases.xml' % (channel, pkg.lower())
 
-    output.einfo("Using: " + url)
+    euscan.output.einfo("Using: " + url)
 
     try:
         fp = helpers.urlopen(url)

@@ -3,7 +3,8 @@ import portage
 import json
 import urllib2
 
-from euscan import helpers, output
+from euscan import helpers
+import euscan
 
 def can_handle(cpv, url):
     return url.startswith('mirror://rubygems/')

@@ -24,7 +25,7 @@ def scan(cpv, url):
     gem = guess_gem(cpv, url)
     url = 'http://rubygems.org/api/v1/versions/%s.json' % gem
 
-    output.einfo("Using: " + url)
+    euscan.output.einfo("Using: " + url)
 
     try:
         fp = helpers.urlopen(url)

@@ -15,7 +15,8 @@ except ImportError:
 import portage
 from portage import dep
 
-from euscan import CONFIG, BLACKLIST_VERSIONS, ROBOTS_TXT_BLACKLIST_DOMAINS, output
+from euscan import CONFIG, BLACKLIST_VERSIONS, ROBOTS_TXT_BLACKLIST_DOMAINS
+import euscan
 
 def htop_vercmp(a, b):
     def fixver(v):

@@ -87,7 +88,7 @@ def version_blacklisted(cp, version):
         None
 
     if rule:
-        output.einfo("%s is blacklisted by rule %s" % (cpv, bv))
+        euscan.output.einfo("%s is blacklisted by rule %s" % (cpv, bv))
     return rule is not None
 
 def version_filtered(cp, base, version):

@@ -254,7 +255,7 @@ def urlallowed(url):
 
 def urlopen(url, timeout=None, verb="GET"):
     if not urlallowed(url):
-        output.einfo("Url '%s' blocked by robots.txt" % url)
+        euscan.output.einfo("Url '%s' blocked by robots.txt" % url)
         return None
 
     if not timeout:

@@ -287,17 +288,17 @@ def tryurl(fileurl, template):
     result = True
 
     if not urlallowed(fileurl):
-        output.einfo("Url '%s' blocked by robots.txt" % fileurl)
+        euscan.output.einfo("Url '%s' blocked by robots.txt" % fileurl)
         return None
 
-    output.ebegin("Trying: " + fileurl)
+    euscan.output.ebegin("Trying: " + fileurl)
 
     try:
         basename = os.path.basename(fileurl)
 
         fp = urlopen(fileurl, verb='HEAD')
         if not fp:
-            output.eend(errno.EPERM)
+            euscan.output.eend(errno.EPERM)
             return None
 
         headers = fp.info()

@@ -328,7 +329,7 @@ def tryurl(fileurl, template):
     except IOError:
         result = None
 
-    output.eend(errno.ENOENT if not result else 0)
+    euscan.output.eend(errno.ENOENT if not result else 0)
 
     return result
 

@@ -383,8 +384,8 @@ def parse_mirror(uri):
 
     eidx = uri.find("/", 9)
     if eidx == -1:
-        output.einfo("Invalid mirror definition in SRC_URI:\n")
-        output.einfo(" %s\n" % (uri))
+        euscan.output.einfo("Invalid mirror definition in SRC_URI:\n")
+        euscan.output.einfo(" %s\n" % (uri))
         return None
 
     mirrorname = uri[9:eidx]

@@ -395,7 +396,7 @@ def parse_mirror(uri):
         shuffle(mirrors)
         uri = mirrors[0].strip("/") + "/" + path
     else:
-        output.einfo("No known mirror by the name: %s\n" % (mirrorname))
+        euscan.output.einfo("No known mirror by the name: %s\n" % (mirrorname))
         return None
 
     return uri

@@ -11,10 +11,12 @@ from gentoolkit import errors
 from gentoolkit.query import Query
 from gentoolkit.eclean.search import (port_settings)
 
-from euscan import CONFIG, BLACKLIST_PACKAGES, output
+from euscan import CONFIG, BLACKLIST_PACKAGES
 from euscan import handlers
 from euscan import helpers
 
+import euscan
+
 
 def filter_versions(cp, versions):
     filtered = {}

@@ -31,18 +33,19 @@ def filter_versions(cp, versions):
 
         filtered[version] = url
 
-    return [ (filtered[version], version) for version in filtered ]
+    return [ (cp, filtered[version], version) for version in filtered ]
 
 def scan_upstream_urls(cpv, urls):
     versions = []
 
     for filename in urls:
         for url in urls[filename]:
-            pp.uprint()
-            output.einfo("SRC_URI is '%s'" % url)
+            if not CONFIG['quiet']:
+                pp.uprint()
+                euscan.output.einfo("SRC_URI is '%s'" % url)
 
             if '://' not in url:
-                output.einfo("Invalid url '%s'" % url)
+                euscan.output.einfo("Invalid url '%s'" % url)
                 continue
 
             ''' Try normal scan '''
|
|||||||
|
|
||||||
|
|
||||||
def scan_upstream(query):
|
def scan_upstream(query):
|
||||||
matches = Query(query).find(
|
matches = Query(query).find(
|
||||||
include_masked=True,
|
include_masked=True,
|
||||||
in_installed=False
|
in_installed=False
|
||||||
)
|
)
|
||||||
|
|
||||||
if not matches:
|
if not matches:
|
||||||
sys.stderr.write(pp.warn("No package matching '%s'" % pp.pkgquery(query)))
|
sys.stderr.write(pp.warn("No package matching '%s'" % pp.pkgquery(query)))
|
||||||
return []
|
return []
|
||||||
|
|
||||||
matches = sorted(matches)
|
matches = sorted(matches)
|
||||||
|
pkg = matches.pop()
|
||||||
|
|
||||||
|
while '9999' in pkg.version and len(matches):
|
||||||
pkg = matches.pop()
|
pkg = matches.pop()
|
||||||
|
|
||||||
while '9999' in pkg.version and len(matches):
|
if not pkg:
|
||||||
pkg = matches.pop()
|
sys.stderr.write(pp.warn("Package '%s' only have a dev version (9999)"
|
||||||
|
% pp.pkgquery(pkg.cp)))
|
||||||
|
return []
|
||||||
|
|
||||||
if not pkg:
|
if pkg.cp in BLACKLIST_PACKAGES:
|
||||||
sys.stderr.write(pp.warn("Package '%s' only have a dev version (9999)" % pp.pkgquery(pkg.cp)))
|
sys.stderr.write(pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp)))
|
||||||
return []
|
return []
|
||||||
|
|
||||||
if pkg.cp in BLACKLIST_PACKAGES:
|
|
||||||
sys.stderr.write(pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp)))
|
|
||||||
return []
|
|
||||||
|
|
||||||
|
if not CONFIG['quiet']:
|
||||||
pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())))
|
pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())))
|
||||||
pp.uprint()
|
pp.uprint()
|
||||||
|
|
||||||
ebuild_path = pkg.ebuild_path()
|
ebuild_path = pkg.ebuild_path()
|
||||||
if ebuild_path:
|
if ebuild_path:
|
||||||
pp.uprint('Ebuild: ' + pp.path(os.path.normpath(ebuild_path)))
|
pp.uprint('Ebuild: ' + pp.path(os.path.normpath(ebuild_path)))
|
||||||
|
|
||||||
pp.uprint('Repository: ' + pkg.repo_name())
|
pp.uprint('Repository: ' + pkg.repo_name())
|
||||||
pp.uprint('Homepage: ' + pkg.environment("HOMEPAGE"))
|
pp.uprint('Homepage: ' + pkg.environment("HOMEPAGE"))
|
||||||
pp.uprint('Description: ' + pkg.environment("DESCRIPTION"))
|
pp.uprint('Description: ' + pkg.environment("DESCRIPTION"))
|
||||||
|
|
||||||
cpv = pkg.cpv
|
cpv = pkg.cpv
|
||||||
metadata = {
|
metadata = {
|
||||||
"EAPI" : port_settings["EAPI"],
|
"EAPI" : port_settings["EAPI"],
|
||||||
"SRC_URI" : pkg.environment("SRC_URI", False),
|
"SRC_URI" : pkg.environment("SRC_URI", False),
|
||||||
}
|
}
|
||||||
use = frozenset(port_settings["PORTAGE_USE"].split())
|
use = frozenset(port_settings["PORTAGE_USE"].split())
|
||||||
try:
|
try:
|
||||||
alist = porttree._parse_uri_map(cpv, metadata, use=use)
|
alist = porttree._parse_uri_map(cpv, metadata, use=use)
|
||||||
aalist = porttree._parse_uri_map(cpv, metadata)
|
aalist = porttree._parse_uri_map(cpv, metadata)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
sys.stderr.write(pp.warn("%s\n" % str(e)))
|
sys.stderr.write(pp.warn("%s\n" % str(e)))
|
||||||
sys.stderr.write(pp.warn("Invalid SRC_URI for '%s'" % pp.pkgquery(cpv)))
|
sys.stderr.write(pp.warn("Invalid SRC_URI for '%s'" % pp.pkgquery(cpv)))
|
||||||
return []
|
return []
|
||||||
|
|
||||||
if "mirror" in portage.settings.features:
|
if "mirror" in portage.settings.features:
|
||||||
urls = aalist
|
urls = aalist
|
||||||
else:
|
else:
|
||||||
urls = alist
|
urls = alist
|
||||||
|
|
||||||
return scan_upstream_urls(pkg.cpv, urls)
|
return scan_upstream_urls(pkg.cpv, urls)
|
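
Note: because the quiet format is a single "<category/package>-<version>: <url>" line per candidate, the output is easy to post-process. A hedged sketch of such a consumer; the sample lines below are made up, only the line shape comes from the code above:

    lines = [
        "app-misc/foo-1.3: http://example.com/foo-1.3.tar.gz",
        "app-misc/foo-1.4: http://example.com/foo-1.4.tar.gz",
    ]

    for line in lines:
        cpv, _, url = line.partition(": ")
        print(cpv, "->", url)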