euscan: better --quiet mode

Signed-off-by: Corentin Chary <corentincj@iksaif.net>
This commit is contained in:
Corentin Chary 2011-12-11 14:32:38 +01:00
parent e71e540d0d
commit 3569f1bb97
7 changed files with 108 additions and 85 deletions

View File

@ -2,6 +2,14 @@
Change history
================
.. _version-0.2.0:
0.2.0
=====
:release-date: ???
* Better --quiet mode
.. _version-0.1.0:
0.1.0

View File

@ -29,7 +29,8 @@ from gentoolkit import pprinter as pp
from gentoolkit.eclean.search import (port_settings) from gentoolkit.eclean.search import (port_settings)
from gentoolkit.errors import GentoolkitException from gentoolkit.errors import GentoolkitException
from euscan import CONFIG, output import euscan
from euscan import CONFIG
from euscan.scan import scan_upstream from euscan.scan import scan_upstream
""" Globals """ """ Globals """
@ -198,7 +199,7 @@ def main():
sys.exit(errno.EINVAL) sys.exit(errno.EINVAL)
""" Change euscan's output """ """ Change euscan's output """
output = EOutput(CONFIG['quiet']) euscan.output = EOutput(CONFIG['quiet'])
if CONFIG['verbose'] > 2: if CONFIG['verbose'] > 2:
httplib.HTTPConnection.debuglevel = 1 httplib.HTTPConnection.debuglevel = 1
@ -223,14 +224,19 @@ def main():
except Exception as err: except Exception as err:
pp.die(1, str(err)) pp.die(1, str(err))
if not CONFIG['quiet']:
print () print ()
for url, version in ret: for cp, url, version in ret:
if not CONFIG['quiet']:
print ("Upstream Version: " print ("Upstream Version: "
+ pp.number("%s" % version) + pp.number("%s" % version)
+ pp.path(" %s" % url)) + pp.path(" %s" % url))
else:
print (pp.cpv("%s-%s" % (cp, version))
+ ": " + pp.path(url))
if not len(ret): if not len(ret) and not CONFIG['quiet']:
print (pp.warn("Didn't find any new version, " print (pp.warn("Didn't find any new version, "
+ "check package's homepage for " + "check package's homepage for "
+ "more informations")); + "more informations"));

View File

@ -6,8 +6,9 @@ from BeautifulSoup import BeautifulSoup
import portage import portage
from euscan import CONFIG, SCANDIR_BLACKLIST_URLS, BRUTEFORCE_BLACKLIST_PACKAGES, BRUTEFORCE_BLACKLIST_URLS, output from euscan import CONFIG, SCANDIR_BLACKLIST_URLS, BRUTEFORCE_BLACKLIST_PACKAGES, BRUTEFORCE_BLACKLIST_URLS
from euscan import helpers from euscan import helpers
import euscan
def scan_html(data, url, pattern): def scan_html(data, url, pattern):
soup = BeautifulSoup(data) soup = BeautifulSoup(data)
@ -48,7 +49,7 @@ def scan_directory_recursive(cpv, url, steps):
steps = steps[1:] steps = steps[1:]
output.einfo("Scanning: %s" % url) euscan.output.einfo("Scanning: %s" % url)
try: try:
fp = helpers.urlopen(url) fp = helpers.urlopen(url)
@ -91,7 +92,7 @@ def scan_directory_recursive(cpv, url, steps):
def scan(cpv, url): def scan(cpv, url):
for bu in SCANDIR_BLACKLIST_URLS: for bu in SCANDIR_BLACKLIST_URLS:
if re.match(bu, url): if re.match(bu, url):
output.einfo("%s is blacklisted by rule %s" % (url, bu)) euscan.output.einfo("%s is blacklisted by rule %s" % (url, bu))
return [] return []
resolved_url = helpers.parse_mirror(url) resolved_url = helpers.parse_mirror(url)
@ -100,11 +101,11 @@ def scan(cpv, url):
template = helpers.template_from_url(resolved_url, ver) template = helpers.template_from_url(resolved_url, ver)
if '${' not in template: if '${' not in template:
output.einfo("Url doesn't seems to depend on version: %s not found in %s" euscan.output.einfo("Url doesn't seems to depend on version: %s not found in %s"
% (ver, resolved_url)) % (ver, resolved_url))
return [] return []
else: else:
output.einfo("Scanning: %s" % template) euscan.output.einfo("Scanning: %s" % template)
steps = helpers.generate_scan_paths(template) steps = helpers.generate_scan_paths(template)
return scan_directory_recursive(cpv, "", steps) return scan_directory_recursive(cpv, "", steps)
@ -116,15 +117,15 @@ def brute_force(cpv, url):
for bp in BRUTEFORCE_BLACKLIST_PACKAGES: for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
if re.match(bp, cp): if re.match(bp, cp):
output.einfo("%s is blacklisted by rule %s" % (cp, bp)) euscan.output.einfo("%s is blacklisted by rule %s" % (cp, bp))
return [] return []
for bp in BRUTEFORCE_BLACKLIST_URLS: for bp in BRUTEFORCE_BLACKLIST_URLS:
if re.match(bp, url): if re.match(bp, url):
output.einfo("%s is blacklisted by rule %s" % (cp, bp)) euscan.output.einfo("%s is blacklisted by rule %s" % (cp, bp))
return [] return []
output.einfo("Generating version from " + ver) euscan.output.einfo("Generating version from " + ver)
components = helpers.split_version(ver) components = helpers.split_version(ver)
versions = helpers.gen_versions(components, CONFIG["brute-force"]) versions = helpers.gen_versions(components, CONFIG["brute-force"])
@ -135,17 +136,17 @@ def brute_force(cpv, url):
versions.remove(v) versions.remove(v)
if not versions: if not versions:
output.einfo("Can't generate new versions from " + ver) euscan.output.einfo("Can't generate new versions from " + ver)
return [] return []
template = helpers.template_from_url(url, ver) template = helpers.template_from_url(url, ver)
if '${PV}' not in template: if '${PV}' not in template:
output.einfo("Url doesn't seems to depend on full version: %s not found in %s" euscan.output.einfo("Url doesn't seems to depend on full version: %s not found in %s"
% (ver, url)) % (ver, url))
return [] return []
else: else:
output.einfo("Brute forcing: %s" % template) euscan.output.einfo("Brute forcing: %s" % template)
result = [] result = []
@ -173,7 +174,7 @@ def brute_force(cpv, url):
result.append([url, version]) result.append([url, version])
if len(result) > CONFIG['brute-force-false-watermark']: if len(result) > CONFIG['brute-force-false-watermark']:
output.einfo("Broken server detected ! Skipping brute force.") euscan.output.einfo("Broken server detected ! Skipping brute force.")
return [] return []
if CONFIG["brute-force-recursive"]: if CONFIG["brute-force-recursive"]:

View File

@ -3,7 +3,8 @@ import portage
import urllib2 import urllib2
import xml.dom.minidom import xml.dom.minidom
from euscan import helpers, output from euscan import helpers
import euscan
def can_handle(cpv, url): def can_handle(cpv, url):
if url.startswith('http://pear.php.net/get/'): if url.startswith('http://pear.php.net/get/'):
@ -29,7 +30,7 @@ def scan(cpv, url):
orig_url = url orig_url = url
url = 'http://%s/rest/r/%s/allreleases.xml' % (channel, pkg.lower()) url = 'http://%s/rest/r/%s/allreleases.xml' % (channel, pkg.lower())
output.einfo("Using: " + url) euscan.output.einfo("Using: " + url)
try: try:
fp = helpers.urlopen(url) fp = helpers.urlopen(url)

View File

@ -3,7 +3,8 @@ import portage
import json import json
import urllib2 import urllib2
from euscan import helpers, output from euscan import helpers
import euscan
def can_handle(cpv, url): def can_handle(cpv, url):
return url.startswith('mirror://rubygems/') return url.startswith('mirror://rubygems/')
@ -24,7 +25,7 @@ def scan(cpv, url):
gem = guess_gem(cpv, url) gem = guess_gem(cpv, url)
url = 'http://rubygems.org/api/v1/versions/%s.json' % gem url = 'http://rubygems.org/api/v1/versions/%s.json' % gem
output.einfo("Using: " + url) euscan.output.einfo("Using: " + url)
try: try:
fp = helpers.urlopen(url) fp = helpers.urlopen(url)

View File

@ -15,7 +15,8 @@ except ImportError:
import portage import portage
from portage import dep from portage import dep
from euscan import CONFIG, BLACKLIST_VERSIONS, ROBOTS_TXT_BLACKLIST_DOMAINS, output from euscan import CONFIG, BLACKLIST_VERSIONS, ROBOTS_TXT_BLACKLIST_DOMAINS
import euscan
def htop_vercmp(a, b): def htop_vercmp(a, b):
def fixver(v): def fixver(v):
@ -87,7 +88,7 @@ def version_blacklisted(cp, version):
None None
if rule: if rule:
output.einfo("%s is blacklisted by rule %s" % (cpv, bv)) euscan.output.einfo("%s is blacklisted by rule %s" % (cpv, bv))
return rule is not None return rule is not None
def version_filtered(cp, base, version): def version_filtered(cp, base, version):
@ -254,7 +255,7 @@ def urlallowed(url):
def urlopen(url, timeout=None, verb="GET"): def urlopen(url, timeout=None, verb="GET"):
if not urlallowed(url): if not urlallowed(url):
output.einfo("Url '%s' blocked by robots.txt" % url) euscan.output.einfo("Url '%s' blocked by robots.txt" % url)
return None return None
if not timeout: if not timeout:
@ -287,17 +288,17 @@ def tryurl(fileurl, template):
result = True result = True
if not urlallowed(fileurl): if not urlallowed(fileurl):
output.einfo("Url '%s' blocked by robots.txt" % fileurl) euscan.output.einfo("Url '%s' blocked by robots.txt" % fileurl)
return None return None
output.ebegin("Trying: " + fileurl) euscan.output.ebegin("Trying: " + fileurl)
try: try:
basename = os.path.basename(fileurl) basename = os.path.basename(fileurl)
fp = urlopen(fileurl, verb='HEAD') fp = urlopen(fileurl, verb='HEAD')
if not fp: if not fp:
output.eend(errno.EPERM) euscan.output.eend(errno.EPERM)
return None return None
headers = fp.info() headers = fp.info()
@ -328,7 +329,7 @@ def tryurl(fileurl, template):
except IOError: except IOError:
result = None result = None
output.eend(errno.ENOENT if not result else 0) euscan.output.eend(errno.ENOENT if not result else 0)
return result return result
@ -383,8 +384,8 @@ def parse_mirror(uri):
eidx = uri.find("/", 9) eidx = uri.find("/", 9)
if eidx == -1: if eidx == -1:
output.einfo("Invalid mirror definition in SRC_URI:\n") euscan.output.einfo("Invalid mirror definition in SRC_URI:\n")
output.einfo(" %s\n" % (uri)) euscan.output.einfo(" %s\n" % (uri))
return None return None
mirrorname = uri[9:eidx] mirrorname = uri[9:eidx]
@ -395,7 +396,7 @@ def parse_mirror(uri):
shuffle(mirrors) shuffle(mirrors)
uri = mirrors[0].strip("/") + "/" + path uri = mirrors[0].strip("/") + "/" + path
else: else:
output.einfo("No known mirror by the name: %s\n" % (mirrorname)) euscan.output.einfo("No known mirror by the name: %s\n" % (mirrorname))
return None return None
return uri return uri

View File

@ -11,10 +11,12 @@ from gentoolkit import errors
from gentoolkit.query import Query from gentoolkit.query import Query
from gentoolkit.eclean.search import (port_settings) from gentoolkit.eclean.search import (port_settings)
from euscan import CONFIG, BLACKLIST_PACKAGES, output from euscan import CONFIG, BLACKLIST_PACKAGES
from euscan import handlers from euscan import handlers
from euscan import helpers from euscan import helpers
import euscan
def filter_versions(cp, versions): def filter_versions(cp, versions):
filtered = {} filtered = {}
@ -31,18 +33,19 @@ def filter_versions(cp, versions):
filtered[version] = url filtered[version] = url
return [ (filtered[version], version) for version in filtered ] return [ (cp, filtered[version], version) for version in filtered ]
def scan_upstream_urls(cpv, urls): def scan_upstream_urls(cpv, urls):
versions = [] versions = []
for filename in urls: for filename in urls:
for url in urls[filename]: for url in urls[filename]:
if not CONFIG['quiet']:
pp.uprint() pp.uprint()
output.einfo("SRC_URI is '%s'" % url) euscan.output.einfo("SRC_URI is '%s'" % url)
if '://' not in url: if '://' not in url:
output.einfo("Invalid url '%s'" % url) euscan.output.einfo("Invalid url '%s'" % url)
continue continue
''' Try normal scan ''' ''' Try normal scan '''
@ -80,13 +83,15 @@ def scan_upstream(query):
pkg = matches.pop() pkg = matches.pop()
if not pkg: if not pkg:
sys.stderr.write(pp.warn("Package '%s' only have a dev version (9999)" % pp.pkgquery(pkg.cp))) sys.stderr.write(pp.warn("Package '%s' only have a dev version (9999)"
% pp.pkgquery(pkg.cp)))
return [] return []
if pkg.cp in BLACKLIST_PACKAGES: if pkg.cp in BLACKLIST_PACKAGES:
sys.stderr.write(pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp))) sys.stderr.write(pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp)))
return [] return []
if not CONFIG['quiet']:
pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name()))) pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())))
pp.uprint() pp.uprint()