euscan: better --quiet mode
Signed-off-by: Corentin Chary <corentincj@iksaif.net>
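The diff below switches every handler from `from euscan import output` to `import euscan` plus `euscan.output.einfo(...)`, and gates decorative `pp.uprint()` calls behind `CONFIG['quiet']`. As a minimal, self-contained sketch of why the late `euscan.output` attribute lookup matters for a quiet mode (all names here are invented stand-ins, not euscan's real classes): rebinding the module attribute silences every caller at once, which an early `from ... import output` binding would not.

    # Toy stand-ins only: EOutput/QuietOutput and the fake "euscan" module are
    # invented for this illustration; they are not the real euscan/portage classes.
    import sys
    import types

    euscan = types.ModuleType("euscan")
    sys.modules["euscan"] = euscan          # so "import euscan" below finds the stub

    class EOutput(object):
        def einfo(self, msg):
            print(" * %s" % msg)

    class QuietOutput(object):
        def einfo(self, msg):
            pass                            # swallow informational messages

    euscan.output = EOutput()

    def handler_scan(url):
        import euscan                       # late lookup, like the handlers after this change
        euscan.output.einfo("Scanning: %s" % url)

    handler_scan("http://example.com/a")    # prints " * Scanning: http://example.com/a"
    euscan.output = QuietOutput()           # roughly what a --quiet mode can do
    handler_scan("http://example.com/b")    # prints nothing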
@@ -6,8 +6,9 @@ from BeautifulSoup import BeautifulSoup
 
 import portage
 
-from euscan import CONFIG, SCANDIR_BLACKLIST_URLS, BRUTEFORCE_BLACKLIST_PACKAGES, BRUTEFORCE_BLACKLIST_URLS, output
+from euscan import CONFIG, SCANDIR_BLACKLIST_URLS, BRUTEFORCE_BLACKLIST_PACKAGES, BRUTEFORCE_BLACKLIST_URLS
 from euscan import helpers
+import euscan
 
 def scan_html(data, url, pattern):
     soup = BeautifulSoup(data)
@@ -48,7 +49,7 @@ def scan_directory_recursive(cpv, url, steps):
 
     steps = steps[1:]
 
-    output.einfo("Scanning: %s" % url)
+    euscan.output.einfo("Scanning: %s" % url)
 
     try:
         fp = helpers.urlopen(url)
@@ -91,7 +92,7 @@ def scan_directory_recursive(cpv, url, steps):
 def scan(cpv, url):
     for bu in SCANDIR_BLACKLIST_URLS:
         if re.match(bu, url):
-            output.einfo("%s is blacklisted by rule %s" % (url, bu))
+            euscan.output.einfo("%s is blacklisted by rule %s" % (url, bu))
             return []
 
     resolved_url = helpers.parse_mirror(url)
@@ -100,11 +101,11 @@ def scan(cpv, url):
 
     template = helpers.template_from_url(resolved_url, ver)
     if '${' not in template:
-        output.einfo("Url doesn't seems to depend on version: %s not found in %s"
+        euscan.output.einfo("Url doesn't seems to depend on version: %s not found in %s"
                      % (ver, resolved_url))
         return []
     else:
-        output.einfo("Scanning: %s" % template)
+        euscan.output.einfo("Scanning: %s" % template)
 
     steps = helpers.generate_scan_paths(template)
     return scan_directory_recursive(cpv, "", steps)
@@ -116,15 +117,15 @@ def brute_force(cpv, url):
 
     for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
         if re.match(bp, cp):
-            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
+            euscan.output.einfo("%s is blacklisted by rule %s" % (cp, bp))
             return []
 
     for bp in BRUTEFORCE_BLACKLIST_URLS:
         if re.match(bp, url):
-            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
+            euscan.output.einfo("%s is blacklisted by rule %s" % (cp, bp))
             return []
 
-    output.einfo("Generating version from " + ver)
+    euscan.output.einfo("Generating version from " + ver)
 
     components = helpers.split_version(ver)
     versions = helpers.gen_versions(components, CONFIG["brute-force"])
@@ -135,17 +136,17 @@ def brute_force(cpv, url):
             versions.remove(v)
 
     if not versions:
-        output.einfo("Can't generate new versions from " + ver)
+        euscan.output.einfo("Can't generate new versions from " + ver)
         return []
 
     template = helpers.template_from_url(url, ver)
 
     if '${PV}' not in template:
-        output.einfo("Url doesn't seems to depend on full version: %s not found in %s"
+        euscan.output.einfo("Url doesn't seems to depend on full version: %s not found in %s"
                      % (ver, url))
         return []
     else:
-        output.einfo("Brute forcing: %s" % template)
+        euscan.output.einfo("Brute forcing: %s" % template)
 
     result = []
 
@@ -173,7 +174,7 @@ def brute_force(cpv, url):
         result.append([url, version])
 
         if len(result) > CONFIG['brute-force-false-watermark']:
-            output.einfo("Broken server detected ! Skipping brute force.")
+            euscan.output.einfo("Broken server detected ! Skipping brute force.")
             return []
 
         if CONFIG["brute-force-recursive"]:
@@ -3,7 +3,8 @@ import portage
 import urllib2
 import xml.dom.minidom
 
-from euscan import helpers, output
+from euscan import helpers
+import euscan
 
 def can_handle(cpv, url):
     if url.startswith('http://pear.php.net/get/'):
@@ -29,7 +30,7 @@ def scan(cpv, url):
     orig_url = url
     url = 'http://%s/rest/r/%s/allreleases.xml' % (channel, pkg.lower())
 
-    output.einfo("Using: " + url)
+    euscan.output.einfo("Using: " + url)
 
     try:
         fp = helpers.urlopen(url)
@@ -3,7 +3,8 @@ import portage
 import json
 import urllib2
 
-from euscan import helpers, output
+from euscan import helpers
+import euscan
 
 def can_handle(cpv, url):
     return url.startswith('mirror://rubygems/')
@@ -24,7 +25,7 @@ def scan(cpv, url):
     gem = guess_gem(cpv, url)
     url = 'http://rubygems.org/api/v1/versions/%s.json' % gem
 
-    output.einfo("Using: " + url)
+    euscan.output.einfo("Using: " + url)
 
     try:
         fp = helpers.urlopen(url)
@@ -15,7 +15,8 @@ except ImportError:
 import portage
 from portage import dep
 
-from euscan import CONFIG, BLACKLIST_VERSIONS, ROBOTS_TXT_BLACKLIST_DOMAINS, output
+from euscan import CONFIG, BLACKLIST_VERSIONS, ROBOTS_TXT_BLACKLIST_DOMAINS
+import euscan
 
 def htop_vercmp(a, b):
     def fixver(v):
@@ -87,7 +88,7 @@ def version_blacklisted(cp, version):
             None
 
     if rule:
-        output.einfo("%s is blacklisted by rule %s" % (cpv, bv))
+        euscan.output.einfo("%s is blacklisted by rule %s" % (cpv, bv))
     return rule is not None
 
 def version_filtered(cp, base, version):
@@ -254,7 +255,7 @@ def urlallowed(url):
 
 def urlopen(url, timeout=None, verb="GET"):
     if not urlallowed(url):
-        output.einfo("Url '%s' blocked by robots.txt" % url)
+        euscan.output.einfo("Url '%s' blocked by robots.txt" % url)
         return None
 
     if not timeout:
@@ -287,17 +288,17 @@ def tryurl(fileurl, template):
     result = True
 
     if not urlallowed(fileurl):
-        output.einfo("Url '%s' blocked by robots.txt" % fileurl)
+        euscan.output.einfo("Url '%s' blocked by robots.txt" % fileurl)
         return None
 
-    output.ebegin("Trying: " + fileurl)
+    euscan.output.ebegin("Trying: " + fileurl)
 
     try:
         basename = os.path.basename(fileurl)
 
         fp = urlopen(fileurl, verb='HEAD')
         if not fp:
-            output.eend(errno.EPERM)
+            euscan.output.eend(errno.EPERM)
             return None
 
         headers = fp.info()
@@ -328,7 +329,7 @@ def tryurl(fileurl, template):
     except IOError:
         result = None
 
-    output.eend(errno.ENOENT if not result else 0)
+    euscan.output.eend(errno.ENOENT if not result else 0)
 
     return result
 
@@ -383,8 +384,8 @@ def parse_mirror(uri):
 
     eidx = uri.find("/", 9)
     if eidx == -1:
-        output.einfo("Invalid mirror definition in SRC_URI:\n")
-        output.einfo("  %s\n" % (uri))
+        euscan.output.einfo("Invalid mirror definition in SRC_URI:\n")
+        euscan.output.einfo("  %s\n" % (uri))
         return None
 
     mirrorname = uri[9:eidx]
@@ -395,7 +396,7 @@ def parse_mirror(uri):
         shuffle(mirrors)
         uri = mirrors[0].strip("/") + "/" + path
     else:
-        output.einfo("No known mirror by the name: %s\n" % (mirrorname))
+        euscan.output.einfo("No known mirror by the name: %s\n" % (mirrorname))
         return None
 
     return uri
@@ -11,10 +11,12 @@ from gentoolkit import errors
 from gentoolkit.query import Query
 from gentoolkit.eclean.search import (port_settings)
 
-from euscan import CONFIG, BLACKLIST_PACKAGES, output
+from euscan import CONFIG, BLACKLIST_PACKAGES
 from euscan import handlers
 from euscan import helpers
 
+import euscan
+
 def filter_versions(cp, versions):
     filtered = {}
 
@@ -31,18 +33,19 @@ def filter_versions(cp, versions):
 
         filtered[version] = url
 
-    return [ (filtered[version], version) for version in filtered ]
+    return [ (cp, filtered[version], version) for version in filtered ]
 
 def scan_upstream_urls(cpv, urls):
     versions = []
 
     for filename in urls:
         for url in urls[filename]:
-            pp.uprint()
-            output.einfo("SRC_URI is '%s'" % url)
+            if not CONFIG['quiet']:
+                pp.uprint()
+            euscan.output.einfo("SRC_URI is '%s'" % url)
 
             if '://' not in url:
-                output.einfo("Invalid url '%s'" % url)
+                euscan.output.einfo("Invalid url '%s'" % url)
                 continue
 
             ''' Try normal scan '''
@@ -64,57 +67,59 @@ def scan_upstream_urls(cpv, urls):
 
 
 def scan_upstream(query):
-        matches = Query(query).find(
-                include_masked=True,
-                in_installed=False
-        )
+    matches = Query(query).find(
+        include_masked=True,
+        in_installed=False
+    )
 
-        if not matches:
-                sys.stderr.write(pp.warn("No package matching '%s'" % pp.pkgquery(query)))
-                return []
+    if not matches:
+        sys.stderr.write(pp.warn("No package matching '%s'" % pp.pkgquery(query)))
+        return []
 
-        matches = sorted(matches)
+    matches = sorted(matches)
+    pkg = matches.pop()
+
+    while '9999' in pkg.version and len(matches):
         pkg = matches.pop()
 
-        while '9999' in pkg.version and len(matches):
-            pkg = matches.pop()
+    if not pkg:
+        sys.stderr.write(pp.warn("Package '%s' only have a dev version (9999)"
+                                 % pp.pkgquery(pkg.cp)))
+        return []
 
-        if not pkg:
-            sys.stderr.write(pp.warn("Package '%s' only have a dev version (9999)" % pp.pkgquery(pkg.cp)))
-            return []
-
-        if pkg.cp in BLACKLIST_PACKAGES:
-                sys.stderr.write(pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp)))
-                return []
+    if pkg.cp in BLACKLIST_PACKAGES:
+        sys.stderr.write(pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp)))
+        return []
 
+    if not CONFIG['quiet']:
         pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())))
         pp.uprint()
 
         ebuild_path = pkg.ebuild_path()
         if ebuild_path:
-                pp.uprint('Ebuild: ' + pp.path(os.path.normpath(ebuild_path)))
+            pp.uprint('Ebuild: ' + pp.path(os.path.normpath(ebuild_path)))
 
         pp.uprint('Repository: ' + pkg.repo_name())
         pp.uprint('Homepage: ' + pkg.environment("HOMEPAGE"))
         pp.uprint('Description: ' + pkg.environment("DESCRIPTION"))
 
-        cpv = pkg.cpv
-        metadata = {
-                "EAPI"    : port_settings["EAPI"],
-                "SRC_URI" : pkg.environment("SRC_URI", False),
-        }
-        use = frozenset(port_settings["PORTAGE_USE"].split())
-        try:
-                alist = porttree._parse_uri_map(cpv, metadata, use=use)
-                aalist = porttree._parse_uri_map(cpv, metadata)
-        except Exception as e:
-                sys.stderr.write(pp.warn("%s\n" % str(e)))
-                sys.stderr.write(pp.warn("Invalid SRC_URI for '%s'" % pp.pkgquery(cpv)))
-                return []
+    cpv = pkg.cpv
+    metadata = {
+        "EAPI"    : port_settings["EAPI"],
+        "SRC_URI" : pkg.environment("SRC_URI", False),
+    }
+    use = frozenset(port_settings["PORTAGE_USE"].split())
+    try:
+        alist = porttree._parse_uri_map(cpv, metadata, use=use)
+        aalist = porttree._parse_uri_map(cpv, metadata)
+    except Exception as e:
+        sys.stderr.write(pp.warn("%s\n" % str(e)))
+        sys.stderr.write(pp.warn("Invalid SRC_URI for '%s'" % pp.pkgquery(cpv)))
+        return []
 
-        if "mirror" in portage.settings.features:
-                urls = aalist
-        else:
-                urls = alist
+    if "mirror" in portage.settings.features:
+        urls = aalist
+    else:
+        urls = alist
 
-        return scan_upstream_urls(pkg.cpv, urls)
+    return scan_upstream_urls(pkg.cpv, urls)
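The last hunk above also wraps the human-readable package banner in `if not CONFIG['quiet']:` while still returning the scan results. A small runnable sketch of that gating, with `CONFIG` and `uprint` as simplified stand-ins for euscan's config dict and gentoolkit's pretty-printer:

    # Simplified stand-ins for illustration only; not euscan's actual API surface.
    CONFIG = {"quiet": False}

    def uprint(msg=""):
        print(msg)

    def scan_package(cp, version):
        if not CONFIG["quiet"]:
            uprint(" * %s [gentoo]" % cp)
            uprint()
        # Results are returned either way; --quiet only trims the chatter.
        return [(cp, version)]

    print(scan_package("app-misc/foo", "1.2.3"))   # banner, blank line, then the result
    CONFIG["quiet"] = True
    print(scan_package("app-misc/foo", "1.2.3"))   # result only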