2011-04-01 17:18:21 +02:00
|
|
|
#!/usr/bin/python
|
|
|
|
|
2011-04-12 15:29:08 +02:00
|
|
|
"""Copyright 2011 Gentoo Foundation
|
|
|
|
Distributed under the terms of the GNU General Public License v2
|
|
|
|
"""
|
|
|
|
|
|
|
|
from __future__ import print_function
|
|
|
|
|
|
|
|
# Meta:
|
|
|
|
__author__ = "Corentin Chary (iksaif)"
|
|
|
|
__email__ = "corentin.chary@gmail.com"
|
|
|
|
__version__ = "git"
|
|
|
|
__productname__ = "euscan"
|
|
|
|
__description__ = "A tool to detect new upstream releases."
|
|
|
|
|
|
|
|
# =======
|
|
|
|
# Imports
|
|
|
|
# =======
|
2011-04-01 17:18:21 +02:00
|
|
|
|
|
|
|
import os
import sys
import re
import time
import getopt
import errno
import random
import urllib2
import StringIO

import pkg_resources

import portage
import portage.versions
from portage import dep
from portage.dbapi import porttree
from portage.exception import InvalidDependString
from portage.output import white, yellow, turquoise, green, teal, red, EOutput

import gentoolkit.pprinter as pp
from gentoolkit import errors
from gentoolkit.query import Query
from gentoolkit.eclean.search import (port_settings)
|
2011-04-01 17:18:21 +02:00
|
|
|
|
2011-04-12 15:29:08 +02:00
|
|
|
# =======
|
|
|
|
# Globals
|
|
|
|
# =======
|
2011-04-01 17:18:21 +02:00
|
|
|
|
2011-04-12 15:29:08 +02:00
|
|
|
# Gentoolkit query options: include masked packages so every known
# version of a package can be inspected, not just installable ones.
QUERY_OPTS = {"include_masked": True}

# Version atoms that must never be proposed as upstream candidates.
BLACKLIST_VERSIONS = [
    # Compatibility package for running binaries linked against a pre gcc 3.4 libstdc++, won't be updated
    '>=sys-libs/libstdc++-v3-3.4',
]

# Packages that should never be scanned at all.
BLACKLIST_PACKAGES = [
    # These kernels are almost dead
    'sys-kernel/usermode-sources',
    'sys-kernel/xbox-sources',
    'sys-kernel/cell-sources',
]

# URL regexes for which upstream directory scanning is pointless or harmful.
SCANDIR_BLACKLIST_URLS = [
    'mirror://rubygems/(.*)', # Not browsable
    'mirror://gentoo/(.*)' # Directory too big
]

# Package regexes excluded from brute-force version probing.
BRUTEFORCE_BLACKLIST_PACKAGES = [
    'net-zope/plonepopoll' # infinite loop any http://plone.org/products/plonepopoll/releases/*/plonepopoll-2-6-1.tgz link will work
]

# URL regexes excluded from brute-force version probing.
BRUTEFORCE_BLACKLIST_URLS = [
    'http://(.*)dockapps.org/download.php/id/(.*)', # infinite loop
    'http://hydra.nixos.org/build/(.*)', # infinite loop
    'http://www.rennings.net/gentoo/distfiles/(.*)' # Doesn't respect 404, infinite loop
]
|
|
|
|
|
|
|
|
def htop_vercmp(a, b):
    """Version-comparison quirk for sys-process/htop.

    Upstream released 0.11/0.12/0.13 as successors of the 0.1.x line,
    so those strings are normalized to 0.1.1/0.1.2/0.1.3 before the
    generic comparison runs.
    """
    def _canonical(version):
        # '0.11'[3:] == '1', so '0.11' -> '0.1.1', and so on.
        if version in ('0.11', '0.12', '0.13'):
            return '0.1.' + version[3:]
        return version

    return simple_vercmp(_canonical(a), _canonical(b))
|
|
|
|
|
|
|
|
# Per-package comparison overrides; vercmp() consults this mapping
# before falling back to the generic simple_vercmp().
VERSION_CMP_PACKAGE_QUIRKS = {
    'sys-process/htop' : htop_vercmp
}

# Regex matching an upstream version string: digits, dotted components,
# an optional letter suffix, and pre/p/beta/b/alpha/a/rc/r qualifiers.
# Used to expand the ${PV} placeholder in regex_from_template().
_v = r'((\d+)((\.\d+)*)([a-zA-Z]*?)(((-|_)(pre|p|beta|b|alpha|a|rc|r)\d*)*))'
|
2011-04-19 11:42:06 +02:00
|
|
|
|
2011-04-12 15:29:08 +02:00
|
|
|
# =========
|
|
|
|
# Functions
|
|
|
|
# =========
|
2011-04-01 17:18:21 +02:00
|
|
|
|
|
|
|
def cast_int_components(version):
    """Convert every numeric element of *version* to int, in place.

    Non-numeric elements (e.g. 'b', 'rc') are left untouched.  The
    mutated list is also returned for convenience.
    """
    for idx in range(len(version)):
        try:
            version[idx] = int(version[idx])
        except ValueError:
            # Keep alphabetic components as strings.
            pass
    return version
|
|
|
|
|
2011-08-03 11:54:45 +02:00
|
|
|
def simple_vercmp(a, b):
    """Compare two version strings.

    Returns 0 when equal, a negative value when a < b, positive when
    a > b.  Gentoo-style versions are compared through portage; strings
    portage cannot parse fall back to setuptools' parse_version order.
    """
    if a == b:
        return 0

    # For sane (Gentoo-parsable) versions portage gives a full answer;
    # it returns None when it cannot make sense of the strings.
    result = portage.versions.vercmp(a, b)
    if result is not None:
        return result

    # Fallback ordering for exotic version schemes.
    parsed_a = pkg_resources.parse_version(a)
    parsed_b = pkg_resources.parse_version(b)
    return -1 if parsed_a < parsed_b else 1
|
2011-04-01 17:18:21 +02:00
|
|
|
|
2011-08-03 11:54:45 +02:00
|
|
|
def vercmp(package, a, b):
    """Compare versions *a* and *b*, honouring per-package quirks.

    Falls back to simple_vercmp() when *package* has no dedicated
    comparison function in VERSION_CMP_PACKAGE_QUIRKS.
    """
    quirk = VERSION_CMP_PACKAGE_QUIRKS.get(package)
    if quirk is not None:
        return quirk(a, b)
    return simple_vercmp(a, b)
|
|
|
|
|
2011-04-25 22:24:52 +02:00
|
|
|
def skipnightly(a, b):
    """Return True when candidate *b* looks like a date-based nightly
    build to skip, relative to reference version *a*.

    NOTE(review): this assumes pkg_resources.parse_version() returns a
    sequence of components (old setuptools behaviour); on modern
    setuptools it returns a Version object with no len(), which would
    raise here — confirm the supported setuptools version.
    """
    a = pkg_resources.parse_version(a)
    b = pkg_resources.parse_version(b)

    # Try to skip nightly builds when not wanted (www-apps/moodle)
    if len(a) != len(b) and len(b) == 2 and len(b[0]) == len('yyyymmdd'):
        return True
    return False
|
|
|
|
|
2011-04-19 11:42:06 +02:00
|
|
|
def generate_templates_vars(version):
    """Build (literal, placeholder) substitution pairs for *version*.

    For every prefix of the version's components a pair is produced
    mapping the dotted literal (e.g. "1.2") to positional placeholders
    (e.g. "${0}.${1}"), plus (version, "${PV}") for the full string.
    Pairs are returned longest-match first.
    """
    pairs = []
    components = split_version(version)

    for length in range(2, len(components)):
        literal = ".".join(str(components[k]) for k in range(length))
        placeholder = ".".join('${%d}' % k for k in range(length))
        pairs.append((literal, placeholder))

    pairs.append((version, '${PV}'))
    # Longest substitutions must be tried first by callers.
    pairs.reverse()
    return pairs
|
2011-04-01 17:18:21 +02:00
|
|
|
|
|
|
|
def template_from_url(url, version):
    """Replace occurrences of *version* (and its component prefixes) in
    *url* with ${PV}/${N} placeholders, producing a scan template.
    """
    prefix, chunks = url.split('://')
    chunks = chunks.split('/')

    # The substitution list depends only on the version: compute it once
    # instead of once per path component (was recomputed in the loop).
    subs = generate_templates_vars(version)

    for i in range(len(chunks)):
        chunk = chunks[i]

        for sub in subs:
            chunk = chunk.replace(sub[0], sub[1])

        chunks[i] = chunk

    return prefix + "://" + "/".join(chunks)
|
|
|
|
|
|
|
|
def url_from_template(url, version):
    """Expand a template from template_from_url() back into a URL.

    ${PV} becomes the full version string; ${N} becomes the N-th
    component of the version.
    """
    components = split_version(version)

    expanded = url.replace('${PV}', version)
    for index in range(len(components)):
        expanded = expanded.replace('${%d}' % index, str(components[index]))

    return expanded
|
|
|
|
|
|
|
|
# Stolen from distutils.LooseVersion
# Used for brute force to increment the version
def split_version(version):
    """Split *version* into a list of int and str components.

    "1.2b3" -> [1, 2, 'b', 3].  Dot separators are dropped and numeric
    runs are converted to int so they can be compared and incremented.
    """
    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)
    # Materialize as a list (filter() is lazy on Python 3, which would
    # break the index assignment below) and drop the empty strings and
    # dot separators produced by re.split().
    components = [c for c in component_re.split(version) if c and c != '.']
    for i in range(len(components)):
        try:
            components[i] = int(components[i])
        except ValueError:
            # Alphabetic components stay as strings.
            pass
    return components
|
|
|
|
|
|
|
|
def join_version(components):
    """Inverse of split_version(): join components back into a string.

    A dot is inserted only between two numeric components; alphabetic
    components are glued on directly ("1.2" + "b" -> "1.2b").
    """
    parts = []
    for i, comp in enumerate(components):
        parts.append(str(comp))
        if i >= len(components) - 1:
            break
        # Separate two adjacent numbers with a dot.
        if type(comp) != str and type(components[i + 1]) != str:
            parts.append(".")
    return "".join(parts)
|
|
|
|
|
|
|
|
def increment_version(components, level):
    """Bump the component at *level* in place, odometer-style.

    Numeric components to the right of *level* are reset to zero, then
    the component at *level* is incremented (if numeric).  The mutated
    list is returned.  Raises Exception when *level* is out of range.
    """
    size = len(components)

    if not (0 <= level <= size - 1):
        raise Exception

    # Zero every numeric component to the right of *level*.
    for pos in range(size, level + 1, -1):
        if type(components[pos - 1]) == int:
            components[pos - 1] = 0

    if type(components[level]) == int:
        components[level] += 1

    return components
|
|
|
|
|
|
|
|
def gen_versions(components, level):
    """Generate candidate version component lists for brute forcing.

    Starting from *components* (as produced by split_version()), bumps
    each of the last *level* components in turn, snapshotting *level*
    successive increments per position.  Note that *components* is
    mutated in the process.  Returns a list of component lists
    (duplicates are possible; callers deduplicate).
    """
    n = len(components)
    depth = level
    # Never bump more positions than the version actually has.
    level = min(level, n)

    if not n:
        return []

    versions = []

    # For each of the last `level` positions (rightmost first), record
    # `depth` successive increments; increment_version() zeroes the
    # lower-order components at each step.
    for i in range(n, n - level, -1):
        increment_version(components, i - 1)
        for j in range(depth):
            versions.append(list(components))
            increment_version(components, i - 1)

    return versions
|
|
|
|
|
2011-04-20 12:04:47 +02:00
|
|
|
def tryurl(fileurl, output, template):
    """Probe *fileurl* and decide whether it points at a real distfile.

    Returns (final_url, headers) on success and None when the URL looks
    bogus (wrong attachment name, empty body, HTML page, redirect to a
    different version, or a network error).  *template* is the
    placeholder template the URL was generated from; it is used to
    recognise redirects that land on another version.
    """
    result = True

    output.ebegin("Trying: " + fileurl)

    try:
        basename = os.path.basename(fileurl)

        # 5 second timeout: brute forcing probes many URLs.
        fp = urllib2.urlopen(fileurl, None, 5)
        headers = fp.info()

        # A Content-disposition naming a different file usually means a
        # generic download/landing page rather than our tarball.
        if 'Content-disposition' in headers and basename not in headers['Content-disposition']:
            result = None
        # Empty body: nothing to download.
        elif 'Content-Length' in headers and headers['Content-Length'] == '0':
            result = None
        # HTML responses are index or error pages, not distfiles.
        elif 'text/html' in headers['Content-Type']:
            result = None
        elif fp.geturl() != fileurl:
            regex = regex_from_template(template)
            baseregex = regex_from_template(os.path.basename(template))
            basename2 = os.path.basename(fp.geturl())

            # Redirect to another (earlier?) version
            if basename != basename2 and (re.match(regex, fp.geturl()) or re.match(baseregex, basename2)):
                result = None

        if result:
            result = (fp.geturl(), fp.info())

    except urllib2.URLError:
        result = None
    except IOError:
        result = None

    # Report success/failure through the Gentoo-style status output.
    output.eend(errno.ENOENT if not result else 0)

    return result
|
|
|
|
|
|
|
|
def regex_from_template(template):
    """Turn a ${N}/${PV} template into a regex matching candidate files.

    NOTE(review): the replace() calls below undo re.escape()'s quoting
    of '$' and '{'.  This relies on Python 2's re.escape() escaping
    every non-alphanumeric character; Python 3.7+ escapes far fewer
    characters, so the literal '\$\{' patterns would need updating.
    """
    template = re.escape(template)
    # Un-escape the placeholder syntax so it can be substituted below.
    template = template.replace('\$\{', '${')
    template = template.replace('\}', '}')
    template = template.replace('}\.$', '}.$')
    # ${1} is typically a minor-version slot: digits only, non-greedy.
    template = template.replace('${1}', r'([\d]+?)')
    # Any remaining run of positional placeholders matches one "word".
    template = re.sub(r'(\$\{\d+\}\.?)+', r'([\w]+?)', template)
    # ${PV} matches a full upstream version string (see _v above).
    template = template.replace('${PV}', _v)
    # Allow an optional trailing slash and anchor at end of string.
    template = template + r'/?$'
    return template
|
|
|
|
|
|
|
|
def basedir_from_template(template):
    """Return the path part of *template* before its first placeholder.

    The result is truncated at the last '/' preceding '${'; templates
    without a placeholder come back unchanged, and an empty string is
    returned when no '/' precedes the placeholder.
    """
    marker = template.find('${')
    if marker == -1:
        return template

    slash = template[0:marker].rfind('/')
    if slash == -1:
        return ""

    return template[0:slash]
|
|
|
|
|
|
|
|
def generate_scan_paths(url):
    """Split a template URL into (base-path, component-regex) scan steps.

    Literal path components are accumulated into the base path; each
    component containing a ${...} placeholder terminates a step whose
    regex is built by regex_from_template().
    """
    scheme, remainder = url.split('://')
    segments = remainder.split('/')

    steps = []
    accumulated = scheme + ":/"

    for segment in segments:
        if '${' not in segment:
            # Plain component: extend the current base path.
            accumulated += "/" + segment
            continue
        steps.append((accumulated, regex_from_template(segment)))
        accumulated = ""

    return steps
|
|
|
|
|
2011-08-28 13:20:40 +02:00
|
|
|
def versionBlacklisted(cp, version, output=None):
    """Return True when cp-version matches an atom in BLACKLIST_VERSIONS.

    *cp* is the category/package name and *version* the candidate
    version.  When *output* is given, an informational message naming
    the matching rule is emitted.
    """
    rule = None
    cpv = '%s-%s' % (cp, version)

    for bv in BLACKLIST_VERSIONS:
        if dep.match_from_list(bv, [cpv]):
            rule = bv
            # BUG FIX: this used to be a bare `None` statement (a
            # no-op); stop at the first matching rule as intended.
            break

    if rule and output:
        # BUG FIX: report the rule that matched, not the last one tried.
        output.einfo("%s is blacklisted by rule %s" % (cpv, rule))
    return rule is not None
|
|
|
|
|
2011-08-03 11:54:45 +02:00
|
|
|
def scan_directory_recursive(cpv, url, steps, vmin, vmax, output):
    """Consume one scan step and collect candidate upstream versions.

    *steps* is a list of (path, regex) pairs from generate_scan_paths();
    the first pair is handled here and the remainder recursively for
    every matching directory entry.  Versions outside (vmin, vmax),
    blacklisted versions, and apparent nightly builds are filtered out.
    Returns a list of (url, version) tuples.
    """
    if not steps:
        return []

    cp, ver, rev = portage.pkgsplit(cpv)
    url += steps[0][0]
    pattern = steps[0][1]

    steps = steps[1:]

    output.einfo("Scanning: %s" % url)

    try:
        fp = urllib2.urlopen(url, None, 5)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    data = fp.read()

    results = []

    # HTML directory listing: extract <a href> targets.
    if re.search("<\s*a\s+[^>]*href", data):
        from BeautifulSoup import BeautifulSoup

        soup = BeautifulSoup(data)

        for link in soup.findAll('a'):
            href = link.get("href")
            if not href:
                continue
            # Make absolute links relative to the scanned directory.
            if href.startswith(url):
                href = href.replace(url, "", 1)

            match = re.match(pattern, href, re.I)
            if match:
                results.append((match.group(1), match.group(0)))

    elif url.startswith('ftp://'): # Probably a FTP Server
        # Plain FTP listing: match the pattern line by line.
        buf = StringIO.StringIO(data)
        for line in buf.readlines():
            line = line.replace("\n", "").replace("\r", "")
            match = re.search(pattern, line, re.I)
            if match:
                results.append((match.group(1), match.group(0)))
    # add url

    versions = []

    for version, path in results:
        # Keep only versions strictly between vmin and vmax.
        if vmin and vercmp(cp, version, vmin) <= 0:
            continue
        if vmax and vercmp(cp, version, vmax) >= 0:
            continue

        if versionBlacklisted(cp, version, output):
            continue

        if skipnightly(vmin, version):
            continue

        # Turn the matched entry into an absolute URL.
        if not url.endswith('/') and not path.startswith('/'):
            path = url + '/' + path
        else:
            path = url + path

        versions.append((path, version))
        if steps:
            # Descend into the matched directory for the next step.
            ret = scan_directory_recursive(cpv, path, steps, vmin, vmax, output)
            versions.extend(ret)
    return versions
|
|
|
|
|
2011-08-28 13:20:40 +02:00
|
|
|
'''
|
|
|
|
- python: PyPi
|
|
|
|
- PHP: PECL / PEAR
|
|
|
|
- ftp.kde.org: doesn't scan the "unstable" tree
|
|
|
|
- mysql: should use http://downloads.mysql.com/archives/
|
|
|
|
- mariadb: should use http://downloads.askmonty.org/MariaDB/+releases/
|
|
|
|
'''
|
|
|
|
|
|
|
|
def scan_directory(cpv, url, options, output, limit=None):
    """Scan the upstream directory tree derived from SRC_URI *url* for
    versions of *cpv* newer than the current one (up to *limit*).

    Returns a list of (url, version) tuples; empty when directory
    scanning is disabled, the URL is blacklisted, or the URL does not
    depend on the version.
    """
    # Ftp: list dir
    # Handle mirrors
    if not options["scan-dir"]:
        return []

    for bu in SCANDIR_BLACKLIST_URLS:
        if re.match(bu, url):
            output.einfo("%s is blacklisted by rule %s" % (url, bu))
            return []

    # mirror:// URLs must be resolved to a concrete mirror first.
    resolved_url = parseMirror(url, output)

    catpkg, ver, rev = portage.pkgsplit(cpv)

    template = template_from_url(resolved_url, ver)
    if '${' not in template:
        # BUG FIX: this branch referenced the undefined name `fileurl`
        # and raised NameError; report the actual url instead.
        output.einfo("Url doesn't seems to depend on version: %s not found in %s"
            % (ver, url))
        return []
    else:
        output.einfo("Scanning: %s" % template)

    steps = generate_scan_paths(template)
    return scan_directory_recursive(cpv, "", steps, ver, limit, output)
|
2011-04-01 17:18:21 +02:00
|
|
|
|
2011-04-12 15:29:08 +02:00
|
|
|
def brute_force(cpv, fileurl, options, output, limit=None):
    """Guess upstream versions by incrementing version components.

    Generates candidate versions from *cpv*'s current version, expands
    each through the URL template derived from *fileurl*, and probes the
    resulting URLs with tryurl().  *limit* is an exclusive upper bound
    on the versions tried.  Returns a list of [url, version] pairs.
    """
    if options["brute-force"] <= 0:
        return []

    catpkg, ver, rev = portage.pkgsplit(cpv)

    for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
        if re.match(bp, catpkg):
            output.einfo("%s is blacklisted by rule %s" % (catpkg, bp))
            return []

    for bp in BRUTEFORCE_BLACKLIST_URLS:
        if re.match(bp, fileurl):
            # BUG FIX: this message printed the package name instead of
            # the blacklisted URL.
            output.einfo("%s is blacklisted by rule %s" % (fileurl, bp))
            return []

    output.einfo("Generating version from " + ver)

    components = split_version(ver)
    versions = gen_versions(components, options["brute-force"])

    # Remove versions not newer than the current one.  BUG FIX: the
    # original removed items from `versions` while iterating over it,
    # which silently skips elements; build a filtered list instead.
    versions = [v for v in versions
                if vercmp(catpkg, ver, join_version(v)) < 0]

    if not versions:
        output.einfo("Can't generate new versions from " + ver)
        return []

    template = template_from_url(fileurl, ver)

    if '${PV}' not in template:
        output.einfo("Url doesn't seems to depend on full version: %s not found in %s"
            % (ver, fileurl))
        return []
    else:
        output.einfo("Brute forcing: %s" % template)

    result = []

    i = 0
    done = []

    while i < len(versions):
        components = versions[i]
        i += 1
        # BUG FIX: `done` stores tuples, so the membership test must
        # compare tuple(components) — a list never equals a tuple, so
        # the old check never skipped anything.
        if tuple(components) in done:
            continue
        done.append(tuple(components))

        vstring = join_version(components)

        if versionBlacklisted(catpkg, vstring, output):
            continue

        if limit and vercmp(catpkg, vstring, limit) >= 0:
            continue

        url = url_from_template(template, vstring)

        infos = tryurl(url, output, template)

        if not infos:
            continue

        result.append([url, vstring])

        # On a hit, explore further increments around it.
        if options["brute-force-recursive"]:
            for v in gen_versions(components, options["brute-force"]):
                if v not in versions and tuple(v) not in done:
                    versions.append(v)

        if options["oneshot"]:
            break

    return result
|
|
|
|
|
|
|
|
|
2011-04-12 15:29:08 +02:00
|
|
|
def parseMirror(uri, output):
    """Resolve a mirror:// URI to a concrete URL via a random mirror.

    Non-mirror URIs are returned unchanged.  Malformed mirror URIs and
    unknown mirror names yield None after an informational message.
    """
    from random import shuffle

    mirrors = portage.settings.thirdpartymirrors()

    if not uri.startswith("mirror://"):
        return uri

    # len("mirror://") == 9; the mirror name ends at the next '/'.
    eidx = uri.find("/", 9)
    if eidx == -1:
        output.einfo("Invalid mirror definition in SRC_URI:\n")
        output.einfo("  %s\n" % (uri))
        return None

    mirrorname = uri[9:eidx]
    path = uri[eidx+1:]

    if mirrorname not in mirrors:
        output.einfo("No known mirror by the name: %s\n" % (mirrorname))
        return None

    # Pick one mirror at random to spread the load.
    candidates = mirrors[mirrorname]
    shuffle(candidates)
    return candidates[0].strip("/") + "/" + path
|
|
|
|
|
|
|
|
def setupSignals():
    """ This block ensures that ^C interrupts are handled quietly. """
    import signal

    def exithandler(signum,frame):
        # Ignore further signals while shutting down so a second ^C
        # doesn't produce a traceback.
        signal.signal(signal.SIGINT, signal.SIG_IGN)
        signal.signal(signal.SIGTERM, signal.SIG_IGN)
        print ()
        sys.exit(errno.EINTR)

    signal.signal(signal.SIGINT, exithandler)
    signal.signal(signal.SIGTERM, exithandler)
    # Die silently on broken pipes (e.g. output piped into `head`).
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
|
|
|
|
|
|
|
|
|
|
|
|
def printVersion():
    """Print product name, version, authorship and license to stdout."""
    print("%s (%s) - %s"
          % (__productname__, __version__, __description__))
    print()
    print("Author: %s <%s>" % (__author__,__email__))
    print("Copyright 2011 Gentoo Foundation")
    print("Distributed under the terms of the GNU General Public License v2")
|
|
|
|
|
|
|
|
|
|
|
|
def printUsage(_error=None, help=None):
    """Print help message. May also print partial help to stderr if an
    error from {'options'} is specified."""

    # Errors go to stderr; plain help goes to stdout.
    out = sys.stdout
    if _error:
        out = sys.stderr
    # Only the two recognised error categories select partial help.
    if not _error in ('global-options', 'packages',):
        _error = None
    if not _error and not help: help = 'all'
    if _error in ('global-options',):
        print( pp.error("Wrong option on command line."), file=out)
        print( file=out)
    if _error in ('packages',):
        print( pp.error("You need to specify exactly one package."), file=out)
        print( file=out)
    print( white("Usage:"), file=out)
    if _error in ('global-options', 'packages',) or help == 'all':
        print( " "+turquoise(__productname__),
               yellow("[options]"),
               green("<package>"), file=out)
    if _error in ('global-options',) or help == 'all':
        print( " "+turquoise(__productname__),
               yellow("[--help, --version]"), file=out)

    print(file=out)
    # Options section.
    if _error in ('global-options',) or help:
        print( "Available ", yellow("options")+":", file=out)
        print( yellow(" -C, --nocolor")+
               " - turn off colors on output", file=out)
        print( yellow(" -q, --quiet")+
               " - be as quiet as possible", file=out)
        print( yellow(" -h, --help")+ \
               " - display the help screen", file=out)
        print( yellow(" -V, --version")+
               " - display version info", file=out)
        print( file=out)
        print( yellow(" -1, --oneshot")+
               " - stop as soon as a new version is found", file=out)
        print( yellow(" -b, --brute-force=<level>")+
               " - define the brute force "+yellow("<level>")+" (default: 2)\n" +
               " " * 29 + "bigger levels will generate more versions numbers\n" +
               " " * 29 + "0 means disabled", file=out)
        print( file=out)
    # Positional-argument section.
    if _error in ('packages',) or help:
        print( green(" package")+
               " - the package (or ebuild) you want to scan", file=out)
        print( file=out)
    #print( "More detailed instruction can be found in",
    #       turquoise("`man %s`" % __productname__), file=out)
|
|
|
|
|
|
|
|
|
|
|
|
class ParseArgsException(Exception):
    """For parseArgs() -> main() communications.

    `value` names the outcome main() should handle: 'help', 'version',
    'packages', or '<mode>-options'.
    """
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return repr(self.value)
|
|
|
|
|
|
|
|
|
|
|
|
def parseArgs(options={}):
    """Parse the command line arguments. Raise exceptions on
    errors. Returns package and affect the options dict.

    NOTE: the mutable default is part of the existing API — main()
    always passes its own dict, which is filled in place.
    """

    def optionSwitch(option,opts):
        """local function for interpreting command line options
        and setting options accordingly"""
        return_code = True
        for o, a in opts:
            if o in ("-h", "--help"):
                raise ParseArgsException('help')
            elif o in ("-V", "--version"):
                raise ParseArgsException('version')
            elif o in ("-C", "--nocolor"):
                options['nocolor'] = True
                pp.output.nocolor()
            elif o in ("-q", "--quiet"):
                options['quiet'] = True
                options['verbose'] = False
            elif o in ("-1", "--oneshot"):
                options['oneshot'] = True
            elif o in ("-b", "--brute-force"):
                options['brute-force'] = int(a)
            elif o in ("-v", "--verbose") and not options['quiet']:
                options['verbose'] = True
            else:
                return_code = False

        return return_code

    # here are the different allowed command line options (getopt args)
    getopt_options = {'short':{}, 'long':{}}
    getopt_options['short']['global'] = "hVCqv1b:"
    getopt_options['long']['global'] = ["help", "version", "nocolor", "quiet",
        "verbose", "oneshot", "brute-force="]
    # set default options, except 'nocolor', which is set in main()
    options['quiet'] = False
    options['verbose'] = False
    options['brute-force'] = 2
    options['oneshot'] = False
    options['brute-force-recursive'] = True # FIXME add an option
    options['scan-dir'] = True # FIXME add an option

    short_opts = getopt_options['short']['global']
    long_opts = getopt_options['long']['global']
    opts_mode = 'global'

    # apply getopts to command line, show partial help on failure
    try:
        opts, args = getopt.getopt(sys.argv[1:], short_opts, long_opts)
    # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; only getopt parse errors belong here.
    except getopt.GetoptError:
        raise ParseArgsException(opts_mode+'-options')

    # set options accordingly
    optionSwitch(options,opts)

    if len(args) != 1:
        raise ParseArgsException('packages')

    return args[0]
|
|
|
|
|
|
|
|
def scanUpstream(options, package, output):
    """Scan upstream for new versions of *package* and print findings.

    Resolves the package via gentoolkit, extracts its SRC_URI map, then
    runs a directory scan and a brute-force probe for every upstream
    URL.  Exits the process on unresolvable/blacklisted packages.
    Returns the raw list of (url, version) candidates found.
    """
    matches = Query(package).find(
        include_masked=QUERY_OPTS['include_masked'],
        in_installed=False
    )

    if not matches:
        sys.stderr.write(pp.warn("No package matching '%s'" % pp.pkgquery(package)))
        sys.exit(errno.ENOENT)

    # Highest visible version is scanned.
    matches = sorted(matches)
    pkg = matches.pop()

    # Live (9999) ebuilds have no upstream tarball; fall back to the
    # next-best version when one exists.
    if '9999' in pkg.version:
        if len(matches) == 0:
            sys.stderr.write(pp.warn("Package '%s' only have a dev version (9999)" % pp.pkgquery(package)))
            sys.exit(errno.ENOENT)
        else:
            pkg = matches.pop()

    if pkg.cp in BLACKLIST_PACKAGES:
        sys.stderr.write(pp.warn("Package '%s' is blacklisted" % pp.pkgquery(package)))
        sys.exit(errno.ENOENT)

    pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())))
    pp.uprint()

    ebuild_path = pkg.ebuild_path()
    if ebuild_path:
        pp.uprint('Ebuild: ' + pp.path(os.path.normpath(ebuild_path)))

    pp.uprint('Repository: ' + pkg.repo_name())
    pp.uprint('Homepage: ' + pkg.environment("HOMEPAGE"))
    pp.uprint('Description: ' + pkg.environment("DESCRIPTION"))

    cpv = pkg.cpv
    # Minimal metadata required by portage's SRC_URI parser.
    metadata = {
        "EAPI" : port_settings["EAPI"],
        "SRC_URI" : pkg.environment("SRC_URI", False),
    }
    use = frozenset(port_settings["PORTAGE_USE"].split())
    try:
        # alist honours USE conditionals; aalist is the unconditional map.
        alist = porttree._parse_uri_map(cpv, metadata, use=use)
        aalist = porttree._parse_uri_map(cpv, metadata)
    # NOTE(review): InvalidDependString is referenced unqualified here;
    # ensure it is imported (from portage.exception) at module level,
    # otherwise this handler itself raises NameError.
    except InvalidDependString as e:
        sys.stderr.write(pp.warn("%s\n" % str(e)))
        sys.stderr.write(pp.warn("Invalid SRC_URI for '%s'" % pp.pkgquery(cpv)))
        sys.exit(errno.ENOENT)

    if "mirror" in portage.settings.features:
        fetchme = aalist
    else:
        fetchme = alist

    versions = []

    for filename in fetchme:
        for url in fetchme[filename]:
            print ()
            output.einfo("SRC_URI is '%s'" % url)

            if '://' not in url:
                output.einfo("Invalid url '%s'" % url)
                continue

            ''' Try normal scan '''
            versions.extend(scan_directory(cpv, url, options, output))

            if versions and options['oneshot']:
                break

            ''' Brute Force '''
            versions.extend(brute_force(cpv, url, options, output))

            if versions and options['oneshot']:
                break

    newversions = {}

    for url, version in versions:
        ''' Try to keep the most specific urls (determinted by the length) '''
        if version in newversions and len(url) < len(newversions[version]):
            continue
        ''' Remove blacklisted versions '''
        if versionBlacklisted(pkg.cp, version, output):
            continue

        newversions[version] = url

    print ()

    for version in newversions:
        print ("Upstream Version:"
               + pp.number("%s" % version)
               + pp.path(" %s" % newversions[version]))

    if not len(newversions):
        print (pp.warn("Didn't find any new version,"
                       + "check package's homepage for "
                       + "more informations"));
    return versions
|
|
|
|
|
|
|
|
|
2011-04-12 15:29:08 +02:00
|
|
|
def main():
    """Parse the command line and run the upstream scan."""
    # Colors are disabled when portage says so or stdout is not a tty.
    no_color = (port_settings["NOCOLOR"] in ('yes','true')
                or not sys.stdout.isatty())
    options = {'nocolor': no_color}
    if no_color:
        pp.output.nocolor()

    # Parse command line options and actions; the exception value tells
    # us which usage/help screen (if any) to show before exiting.
    try:
        package = parseArgs(options)
    except ParseArgsException as e:
        if e.value == 'help':
            printUsage(help='all')
            sys.exit(0)
        elif e.value[:5] == 'help-':
            printUsage(help=e.value[5:])
            sys.exit(0)
        elif e.value == 'version':
            printVersion()
            sys.exit(0)
        else:
            printUsage(e.value)
            sys.exit(errno.EINVAL)

    output = EOutput(options['quiet'])
    scanUpstream(options, package, output)
|
2011-04-01 17:18:21 +02:00
|
|
|
|
|
|
|
|
2011-04-12 15:29:08 +02:00
|
|
|
if __name__ == "__main__":
    try:
        # Install quiet signal handlers before doing any work.
        setupSignals()
        main()
    except KeyboardInterrupt:
        print( "Aborted.")
        sys.exit(errno.EINTR)
    sys.exit(0)
|