Compare commits
9 commits: euscan-0.1 ... euscan-0.1

Commits (SHA1):
2d81d0a0be
333c535230
72e43ac4fe
2bd012ef82
b78e73038d
d673c00e12
fef31c514c
0e6c18b84b
3569f1bb97
@@ -2,6 +2,14 @@
 Change history
 ================
 
+.. _version-0.2.0:
+
+0.2.0
+=====
+:release-date: ???
+
+* Better --quiet mode
+
 .. _version-0.1.0:
 
 0.1.0
TODO (13 changed lines)

@@ -18,12 +18,12 @@ Site Handlers
 euscanwww
 ---------
 
-- add last scan in the footer
+- add last scan in the footer [0.2.0]
 - rss scan world + post ?
-- add an /about/config page that describe the current config (overlays, stuff in make.conf, euscan default settings, etc..)
-- add a table for overlays, change euscan_versions
+- add an /about/config page that describe the current config (overlays, stuff in make.conf, euscan default settings, etc..) [0.2.0]
+- add a table for overlays, change euscan_versions [0.2.0]
 
-### Commands
+### Commands [0.2.0]
 
 - euscan_init: init local tree (portage, layman, eix)
 - euscan_addoverlay: add a layman overlay

@@ -33,9 +33,12 @@ euscanwww
 
 ### Scan process
 
-- Move to celery:
+- Move to celery: [0.2.0]
 - periodic task for emerge --sync; layman -S; eix-update etc... (every day ?)
 - periodic task for euscan (once every week, groupped by category or package ?)
 
 This would allow easier on-demand refresh from web interface
 
+### API
+
+- Move to tastypie: [0.3.0]
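The "Move to celery" item above is only a plan at this point. As a rough, hypothetical sketch of what such periodic tasks could look like (the app module, task names and schedule below are invented, not euscan code):

    # Hypothetical Celery beat sketch for the TODO item above; nothing here ships with euscanwww.
    from celery import Celery
    from celery.schedules import crontab

    app = Celery('euscanwww')

    @app.task
    def sync_portage_trees():
        # would wrap: emerge --sync; layman -S; eix-update
        pass

    @app.task
    def scan_category(category):
        # would run euscan over every package of one category
        pass

    app.conf.beat_schedule = {
        'sync-trees-every-day': {
            'task': sync_portage_trees.name,
            'schedule': crontab(hour=3, minute=0),   # "every day ?"
        },
    }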
bin/euscan (42 changed lines)

@@ -29,7 +29,8 @@ from gentoolkit import pprinter as pp
 from gentoolkit.eclean.search import (port_settings)
 from gentoolkit.errors import GentoolkitException
 
-from euscan import CONFIG, output
+import euscan
+from euscan import CONFIG
 from euscan.scan import scan_upstream
 
 """ Globals """

@@ -198,7 +199,7 @@ def main():
         sys.exit(errno.EINVAL)
 
     """ Change euscan's output """
-    output = EOutput(CONFIG['quiet'])
+    euscan.output = EOutput(CONFIG['quiet'])
     if CONFIG['verbose'] > 2:
         httplib.HTTPConnection.debuglevel = 1
 

@@ -208,29 +209,34 @@ def main():
     try:
         ret = scan_upstream(package)
     except AmbiguousPackageName as e:
-        pkgs = e.args[0]
-        for candidate in pkgs:
-            print(candidate)
+        pkgs = e.args[0]
+        for candidate in pkgs:
+            print(candidate)
 
-        from os.path import basename # To get the short name
+        from os.path import basename # To get the short name
 
-        print(file=sys.stderr)
-        print(pp.error("The short ebuild name '%s' is ambiguous. Please specify" % basename(pkgs[0])),
-              file=sys.stderr, end="")
-        pp.die(1, "one of the above fully-qualified ebuild names instead.")
+        print(file=sys.stderr)
+        print(pp.error("The short ebuild name '%s' is ambiguous. Please specify" % basename(pkgs[0])),
+              file=sys.stderr, end="")
+        pp.die(1, "one of the above fully-qualified ebuild names instead.")
     except GentoolkitException as err:
-        pp.die(1, str(err))
+        pp.die(1, '%s: %s' % (package, str(err)))
     except Exception as err:
-        pp.die(1, str(err))
+        pp.die(1, '%s: %s' % (package, str(err)))
 
-    print ()
+    if not CONFIG['quiet']:
+        print ()
 
-    for url, version in ret:
-        print ("Upstream Version: "
-               + pp.number("%s" % version)
-               + pp.path(" %s" % url))
+    for cp, url, version in ret:
+        if not CONFIG['quiet']:
+            print ("Upstream Version: "
+                   + pp.number("%s" % version)
+                   + pp.path(" %s" % url))
+        else:
+            print (pp.cpv("%s-%s" % (cp, version))
+                   + ": " + pp.path(url))
 
-    if not len(ret):
+    if not len(ret) and not CONFIG['quiet']:
         print (pp.warn("Didn't find any new version, "
                + "check package's homepage for "
                + "more informations"));
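A note on the output change above: the script no longer rebinds a local name imported with "from euscan import output" but assigns the module attribute euscan.output instead, so every module that does "import euscan" and calls euscan.output.einfo(...) picks up the quiet-aware writer. A from-import binding would keep pointing at the old object. A minimal, self-contained illustration of that late-binding difference (toy module name, not euscan itself):

    # Illustrative only; 'euscan_toy' stands in for the real euscan package.
    import types

    euscan_toy = types.ModuleType('euscan_toy')
    euscan_toy.output = 'default-output'

    from_import_binding = euscan_toy.output   # like: from euscan import output
    euscan_toy.output = 'quiet-output'        # like: euscan.output = EOutput(CONFIG['quiet'])

    print(from_import_binding)   # still 'default-output' -> stale copy
    print(euscan_toy.output)     # 'quiet-output' -> what euscan.output.einfo(...) sees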
@@ -60,7 +60,7 @@ def herd(request, herd):
 @render_to('euscan/maintainers.html')
 def maintainers(request):
     maintainers = Package.objects.filter(maintainers__isnull=False)
-    maintainers = maintainers.values('maintainers__id', 'maintainers__name')
+    maintainers = maintainers.values('maintainers__id', 'maintainers__name', 'maintainers__email')
     maintainers = maintainers.annotate(n_packaged=Sum('n_packaged'),
                                        n_overlay=Sum('n_overlay'),
                                        n_versions=Sum('n_versions'))
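For reference, a values() queryset followed by annotate() yields plain dictionaries keyed by the selected fields plus the aggregate names, which is why the maintainers template further down can read maintainers__email directly. A hedged illustration of one row's shape (all values invented):

    # Hypothetical row produced by the queryset above:
    {
        'maintainers__id': 42,
        'maintainers__name': 'Jane Doe',
        'maintainers__email': 'jane@example.org',
        'n_packaged': 10,
        'n_overlay': 2,
        'n_versions': 37,
    }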
@@ -36,21 +36,23 @@ MANAGERS = ADMINS
     },
 """
 
+EUSCAN_ROOT = os.path.dirname(os.path.abspath(__file__))
+
 DATABASES = {
     'default': {
         'ENGINE': 'django.db.backends.sqlite3',
-        'NAME': os.path.join(os.path.dirname( __file__ ), 'euscan.db')
+        'NAME': os.path.join(EUSCAN_ROOT, 'euscan.db')
     },
 }
 
 CACHES = {
     'default': {
         'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
-        'LOCATION': os.path.join(os.path.dirname( __file__ ), 'euscan.cache'),
+        'LOCATION': os.path.join(EUSCAN_ROOT, 'euscan.cache'),
     }
 }
 
-RRD_ROOT = os.path.join(os.path.dirname( __file__ ), 'rrd')
+RRD_ROOT = os.path.join(EUSCAN_ROOT, 'rrd')
 
 # Local time zone for this installation. Choices can be found here:
 # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name

@@ -77,7 +79,7 @@ USE_L10N = True
 
 # Absolute filesystem path to the directory that will hold user-uploaded files.
 # Example: "/home/media/media.lawrence.com/"
-MEDIA_ROOT = os.path.join(os.path.dirname( __file__ ), 'media/')
+MEDIA_ROOT = os.path.join(EUSCAN_ROOT, 'media/')
 
 # URL that handles the media served from MEDIA_ROOT. Make sure to use a
 # trailing slash if there is a path component (optional in other cases).

@@ -117,7 +119,7 @@ ROOT_URLCONF = 'euscanwww.urls'
 FORCE_SCRIPT_NAME=""
 
 TEMPLATE_DIRS = (
-    os.path.join(os.path.dirname( __file__ ), 'templates'),
+    os.path.join(EUSCAN_ROOT, 'templates'),
 )
 
 INSTALLED_APPS = (
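The point of the shared EUSCAN_ROOT built with os.path.abspath() is that the derived paths no longer depend on the process working directory: a bare __file__ can be a relative path, so anything built from os.path.dirname(__file__) breaks after a chdir. A small illustration of the difference (the settings path below is only an example string, not the real value of __file__):

    import os

    example = 'euscanwww/settings.py'   # stand-in for a relative __file__
    relative = os.path.join(os.path.dirname(example), 'euscan.db')
    absolute = os.path.join(os.path.dirname(os.path.abspath(example)), 'euscan.db')

    print(relative)   # euscanwww/euscan.db        (depends on the current directory)
    print(absolute)   # /.../euscanwww/euscan.db   (stable absolute path)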
@@ -20,7 +20,11 @@
       <tr>
         <td>
           <a href="{% url euscan.views.maintainer maintainer.maintainers__id %}">
+            {% if maintainer.maintainers__name != maintainer.maintainers__email %}
+            {{ maintainer.maintainers__name }} <{{ maintainer.maintainers__email }}>
+            {% else %}
             {{ maintainer.maintainers__name }}
+            {% endif %}
           </a>
           {% package_bar maintainer %}
         </td>
@@ -1,9 +1,6 @@
-from euscan.handlers import generic
-from euscan.handlers import php
-from euscan.handlers import pypi
-from euscan.handlers import rubygem
+from euscan.handlers import generic, php, pypi, rubygem, kde
 
-handlers = [ php, pypi, rubygem, generic ]
+handlers = [ kde, php, pypi, rubygem, generic ]
 
 def find_best_handler(cpv, url):
     for handler in handlers:
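The handler list is ordered most-specific first with generic last as the catch-all, so the new kde handler has to sit before generic to get a chance at mirror://kde/ URLs. Presumably find_best_handler() returns the first module whose can_handle() accepts the package and URL; only the signature and the loop are visible in this hunk, so the body below is an assumed sketch, not the actual implementation:

    # Sketch of the selection logic implied above; the return logic is an assumption.
    def find_best_handler(cpv, url):
        for handler in handlers:          # kde, php, pypi, rubygem, generic
            if handler.can_handle(cpv, url):
                return handler
        return None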
@@ -6,8 +6,9 @@ from BeautifulSoup import BeautifulSoup
 
 import portage
 
-from euscan import CONFIG, SCANDIR_BLACKLIST_URLS, BRUTEFORCE_BLACKLIST_PACKAGES, BRUTEFORCE_BLACKLIST_URLS, output
+from euscan import CONFIG, SCANDIR_BLACKLIST_URLS, BRUTEFORCE_BLACKLIST_PACKAGES, BRUTEFORCE_BLACKLIST_URLS
 from euscan import helpers
+import euscan
 
 def scan_html(data, url, pattern):
     soup = BeautifulSoup(data)

@@ -17,6 +18,7 @@ def scan_html(data, url, pattern):
         href = link.get("href")
         if not href:
             continue
+
         if href.startswith(url):
             href = href.replace(url, "", 1)
 

@@ -48,7 +50,7 @@ def scan_directory_recursive(cpv, url, steps):
 
     steps = steps[1:]
 
-    output.einfo("Scanning: %s" % url)
+    euscan.output.einfo("Scanning: %s" % url)
 
     try:
         fp = helpers.urlopen(url)

@@ -91,20 +93,22 @@ def scan_directory_recursive(cpv, url, steps):
 def scan(cpv, url):
     for bu in SCANDIR_BLACKLIST_URLS:
         if re.match(bu, url):
-            output.einfo("%s is blacklisted by rule %s" % (url, bu))
+            euscan.output.einfo("%s is blacklisted by rule %s" % (url, bu))
             return []
 
     resolved_url = helpers.parse_mirror(url)
+    if not resolved_url:
+        return []
 
     cp, ver, rev = portage.pkgsplit(cpv)
 
     template = helpers.template_from_url(resolved_url, ver)
     if '${' not in template:
-        output.einfo("Url doesn't seems to depend on version: %s not found in %s"
+        euscan.output.einfo("Url doesn't seems to depend on version: %s not found in %s"
             % (ver, resolved_url))
         return []
     else:
-        output.einfo("Scanning: %s" % template)
+        euscan.output.einfo("Scanning: %s" % template)
 
     steps = helpers.generate_scan_paths(template)
     return scan_directory_recursive(cpv, "", steps)

@@ -113,18 +117,20 @@ def brute_force(cpv, url):
     cp, ver, rev = portage.pkgsplit(cpv)
 
     url = helpers.parse_mirror(url)
+    if not url:
+        return []
 
     for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
         if re.match(bp, cp):
-            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
+            euscan.output.einfo("%s is blacklisted by rule %s" % (cp, bp))
             return []
 
     for bp in BRUTEFORCE_BLACKLIST_URLS:
         if re.match(bp, url):
-            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
+            euscan.output.einfo("%s is blacklisted by rule %s" % (cp, bp))
             return []
 
-    output.einfo("Generating version from " + ver)
+    euscan.output.einfo("Generating version from " + ver)
 
     components = helpers.split_version(ver)
     versions = helpers.gen_versions(components, CONFIG["brute-force"])

@@ -135,17 +141,17 @@ def brute_force(cpv, url):
             versions.remove(v)
 
     if not versions:
-        output.einfo("Can't generate new versions from " + ver)
+        euscan.output.einfo("Can't generate new versions from " + ver)
         return []
 
     template = helpers.template_from_url(url, ver)
 
     if '${PV}' not in template:
-        output.einfo("Url doesn't seems to depend on full version: %s not found in %s"
+        euscan.output.einfo("Url doesn't seems to depend on full version: %s not found in %s"
             % (ver, url))
         return []
     else:
-        output.einfo("Brute forcing: %s" % template)
+        euscan.output.einfo("Brute forcing: %s" % template)
 
     result = []
 

@@ -173,7 +179,7 @@ def brute_force(cpv, url):
             result.append([url, version])
 
         if len(result) > CONFIG['brute-force-false-watermark']:
-            output.einfo("Broken server detected ! Skipping brute force.")
+            euscan.output.einfo("Broken server detected ! Skipping brute force.")
             return []
 
         if CONFIG["brute-force-recursive"]:
pym/euscan/handlers/kde.py (new file, 34 lines)

@@ -0,0 +1,34 @@
+from euscan.handlers import generic
+
+def can_handle(cpv, url):
+    if url.startswith('mirror://kde/'):
+        return True
+    return False
+
+def clean_results(results):
+    ret = []
+
+    for path, version in results:
+        if version == '5SUMS':
+            continue
+        ret.append((path, version))
+
+    return ret
+
+def scan(cpv, url):
+    results = generic.scan(cpv, url)
+
+    if url.startswith('mirror://kde/unstable/'):
+        url = url.replace('mirror://kde/unstable/', 'mirror://kde/stable/')
+        results += generic.scan(cpv, url)
+
+    return clean_results(results)
+
+def brute_force(cpv, url):
+    results = generic.brute_force(cpv, url)
+
+    if url.startswith('mirror://kde/unstable/'):
+        url = url.replace('mirror://kde/unstable/', 'mirror://kde/stable/')
+        results += generic.brute_force(cpv, url)
+
+    return clean_results(results)
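The new KDE handler delegates to the generic handler and, for packages fetched from the unstable tree, also scans the corresponding stable tree; clean_results() presumably drops the bogus '5SUMS' "version" the generic scanner extracts from checksum files in KDE directory listings. The rewrite itself is a plain string replacement; for a hypothetical unstable URL:

    # Hypothetical example of the rewrite performed in scan()/brute_force() above:
    url = 'mirror://kde/unstable/4.7.90/src/kdelibs-4.7.90.tar.bz2'
    url.replace('mirror://kde/unstable/', 'mirror://kde/stable/')
    # -> 'mirror://kde/stable/4.7.90/src/kdelibs-4.7.90.tar.bz2'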
@@ -3,7 +3,8 @@ import portage
 import urllib2
 import xml.dom.minidom
 
-from euscan import helpers, output
+from euscan import helpers
+import euscan
 
 def can_handle(cpv, url):
     if url.startswith('http://pear.php.net/get/'):

@@ -29,7 +30,7 @@ def scan(cpv, url):
     orig_url = url
     url = 'http://%s/rest/r/%s/allreleases.xml' % (channel, pkg.lower())
 
-    output.einfo("Using: " + url)
+    euscan.output.einfo("Using: " + url)
 
     try:
         fp = helpers.urlopen(url)
@@ -4,7 +4,8 @@ import re
 
 import portage
 
-from euscan import helpers, output
+from euscan import helpers
+import euscan
 
 def can_handle(cpv, url):
     return url.startswith('mirror://pypi/')

@@ -24,7 +25,7 @@ def scan(cpv, url):
 
     package = guess_package(cpv, url)
 
-    output.einfo("Using PyPi XMLRPC: " + package)
+    euscan.output.einfo("Using PyPi XMLRPC: " + package)
 
     client = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
     versions = client.package_releases(package)
@@ -3,7 +3,8 @@ import portage
 import json
 import urllib2
 
-from euscan import helpers, output
+from euscan import helpers
+import euscan
 
 def can_handle(cpv, url):
     return url.startswith('mirror://rubygems/')

@@ -24,7 +25,7 @@ def scan(cpv, url):
     gem = guess_gem(cpv, url)
     url = 'http://rubygems.org/api/v1/versions/%s.json' % gem
 
-    output.einfo("Using: " + url)
+    euscan.output.einfo("Using: " + url)
 
     try:
         fp = helpers.urlopen(url)
@@ -15,7 +15,8 @@ except ImportError:
 import portage
 from portage import dep
 
-from euscan import CONFIG, BLACKLIST_VERSIONS, ROBOTS_TXT_BLACKLIST_DOMAINS, output
+from euscan import CONFIG, BLACKLIST_VERSIONS, ROBOTS_TXT_BLACKLIST_DOMAINS
+import euscan
 
 def htop_vercmp(a, b):
     def fixver(v):

@@ -87,7 +88,7 @@ def version_blacklisted(cp, version):
         None
 
     if rule:
-        output.einfo("%s is blacklisted by rule %s" % (cpv, bv))
+        euscan.output.einfo("%s is blacklisted by rule %s" % (cpv, bv))
     return rule is not None
 
 def version_filtered(cp, base, version):

@@ -254,7 +255,7 @@ def urlallowed(url):
 
 def urlopen(url, timeout=None, verb="GET"):
     if not urlallowed(url):
-        output.einfo("Url '%s' blocked by robots.txt" % url)
+        euscan.output.einfo("Url '%s' blocked by robots.txt" % url)
         return None
 
     if not timeout:

@@ -287,17 +288,17 @@ def tryurl(fileurl, template):
     result = True
 
     if not urlallowed(fileurl):
-        output.einfo("Url '%s' blocked by robots.txt" % fileurl)
+        euscan.output.einfo("Url '%s' blocked by robots.txt" % fileurl)
         return None
 
-    output.ebegin("Trying: " + fileurl)
+    euscan.output.ebegin("Trying: " + fileurl)
 
     try:
         basename = os.path.basename(fileurl)
 
         fp = urlopen(fileurl, verb='HEAD')
         if not fp:
-            output.eend(errno.EPERM)
+            euscan.output.eend(errno.EPERM)
             return None
 
         headers = fp.info()

@@ -328,21 +329,31 @@ def tryurl(fileurl, template):
     except IOError:
         result = None
 
-    output.eend(errno.ENOENT if not result else 0)
+    euscan.output.eend(errno.ENOENT if not result else 0)
 
     return result
 
 def regex_from_template(template):
+    # Escape
     template = re.escape(template)
 
+    # Unescape specific stuff
     template = template.replace('\$\{', '${')
     template = template.replace('\}', '}')
+    template = template.replace('}\.$', '}.$')
-    template = template.replace('${1}', r'([\d]+?)')
-    template = re.sub(r'(\$\{\d+\}\.?)+', r'([\w]+?)', template)
 
+    # Replace ${\d+}
+    #template = template.replace('${0}', r'([\d]+?)')
+    template = re.sub(r'(\$\{\d+\}(\.?))+', r'([\w\.]+?)', template)
+
+    #template = re.sub(r'(\$\{\d+\}\.?)+', r'([\w]+?)', template)
+    #template = re.sub(r'(\$\{\d+\}\.+)+', '(.+?)\.', template)
+    #template = re.sub(r'(\$\{\d+\})+', '(.+?)', template)
 
+    # Full version
     template = template.replace('${PV}', _v)
 
+    # End
     template = template + r'/?$'
     return template
 

@@ -371,6 +382,7 @@ def generate_scan_paths(url):
         else:
             path += "/"
         path += chunk
 
     return steps
+
 def parse_mirror(uri):

@@ -383,8 +395,8 @@ def parse_mirror(uri):
 
     eidx = uri.find("/", 9)
     if eidx == -1:
-        output.einfo("Invalid mirror definition in SRC_URI:\n")
-        output.einfo(" %s\n" % (uri))
+        euscan.output.einfo("Invalid mirror definition in SRC_URI:\n")
+        euscan.output.einfo(" %s\n" % (uri))
         return None
 
     mirrorname = uri[9:eidx]

@@ -395,7 +407,7 @@ def parse_mirror(uri):
         shuffle(mirrors)
         uri = mirrors[0].strip("/") + "/" + path
     else:
-        output.einfo("No known mirror by the name: %s\n" % (mirrorname))
+        euscan.output.einfo("No known mirror by the name: %s" % (mirrorname))
         return None
 
     return uri
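One effect of the regex_from_template() rework worth noting: the version placeholders now expand to a lazy ([\w\.]+?) group instead of the old ([\w]+?)/([\d]+?) forms, so a generated pattern can match a version component that itself contains dots. A small, self-contained illustration of just that character-class difference (the filename is made up; this is not the euscan function itself):

    import re

    old_group = r'([\w]+?)'
    new_group = r'([\w\.]+?)'

    print(re.match(old_group + r'\.tar\.gz$', '1.2.3.tar.gz'))   # None: \w cannot cross the dots
    print(re.match(new_group + r'\.tar\.gz$', '1.2.3.tar.gz'))   # matches, group(1) == '1.2.3'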
@@ -11,10 +11,12 @@ from gentoolkit import errors
 from gentoolkit.query import Query
 from gentoolkit.eclean.search import (port_settings)
 
-from euscan import CONFIG, BLACKLIST_PACKAGES, output
+from euscan import CONFIG, BLACKLIST_PACKAGES
 from euscan import handlers
 from euscan import helpers
+
+import euscan
 
 def filter_versions(cp, versions):
     filtered = {}
 

@@ -31,18 +33,19 @@ def filter_versions(cp, versions):
 
         filtered[version] = url
 
-    return [ (filtered[version], version) for version in filtered ]
+    return [ (cp, filtered[version], version) for version in filtered ]
 
 def scan_upstream_urls(cpv, urls):
     versions = []
 
     for filename in urls:
         for url in urls[filename]:
-            pp.uprint()
-            output.einfo("SRC_URI is '%s'" % url)
+            if not CONFIG['quiet']:
+                pp.uprint()
+            euscan.output.einfo("SRC_URI is '%s'" % url)
 
             if '://' not in url:
-                output.einfo("Invalid url '%s'" % url)
+                euscan.output.einfo("Invalid url '%s'" % url)
                 continue
 
             ''' Try normal scan '''

@@ -64,57 +67,59 @@ def scan_upstream_urls(cpv, urls):


def scan_upstream(query):
matches = Query(query).find(
include_masked=True,
in_installed=False
)
matches = Query(query).find(
include_masked=True,
in_installed=False
)

if not matches:
sys.stderr.write(pp.warn("No package matching '%s'" % pp.pkgquery(query)))
return []
if not matches:
sys.stderr.write(pp.warn("No package matching '%s'" % pp.pkgquery(query)))
return []

matches = sorted(matches)
matches = sorted(matches)
pkg = matches.pop()

while '9999' in pkg.version and len(matches):
pkg = matches.pop()

while '9999' in pkg.version and len(matches):
pkg = matches.pop()
if not pkg:
sys.stderr.write(pp.warn("Package '%s' only have a dev version (9999)"
% pp.pkgquery(pkg.cp)))
return []

if not pkg:
sys.stderr.write(pp.warn("Package '%s' only have a dev version (9999)" % pp.pkgquery(pkg.cp)))
return []

if pkg.cp in BLACKLIST_PACKAGES:
sys.stderr.write(pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp)))
return []
if pkg.cp in BLACKLIST_PACKAGES:
sys.stderr.write(pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp)))
return []

if not CONFIG['quiet']:
pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())))
pp.uprint()

ebuild_path = pkg.ebuild_path()
if ebuild_path:
pp.uprint('Ebuild: ' + pp.path(os.path.normpath(ebuild_path)))
pp.uprint('Ebuild: ' + pp.path(os.path.normpath(ebuild_path)))

pp.uprint('Repository: ' + pkg.repo_name())
pp.uprint('Homepage: ' + pkg.environment("HOMEPAGE"))
pp.uprint('Description: ' + pkg.environment("DESCRIPTION"))

cpv = pkg.cpv
metadata = {
"EAPI" : port_settings["EAPI"],
"SRC_URI" : pkg.environment("SRC_URI", False),
}
use = frozenset(port_settings["PORTAGE_USE"].split())
try:
alist = porttree._parse_uri_map(cpv, metadata, use=use)
aalist = porttree._parse_uri_map(cpv, metadata)
except Exception as e:
sys.stderr.write(pp.warn("%s\n" % str(e)))
sys.stderr.write(pp.warn("Invalid SRC_URI for '%s'" % pp.pkgquery(cpv)))
return []
cpv = pkg.cpv
metadata = {
"EAPI" : port_settings["EAPI"],
"SRC_URI" : pkg.environment("SRC_URI", False),
}
use = frozenset(port_settings["PORTAGE_USE"].split())
try:
alist = porttree._parse_uri_map(cpv, metadata, use=use)
aalist = porttree._parse_uri_map(cpv, metadata)
except Exception as e:
sys.stderr.write(pp.warn("%s\n" % str(e)))
sys.stderr.write(pp.warn("Invalid SRC_URI for '%s'" % pp.pkgquery(cpv)))
return []

if "mirror" in portage.settings.features:
urls = aalist
else:
urls = alist
if "mirror" in portage.settings.features:
urls = aalist
else:
urls = alist

return scan_upstream_urls(pkg.cpv, urls)
return scan_upstream_urls(pkg.cpv, urls)
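The visible interface change in this file is that filter_versions(), and therefore scan_upstream(), now returns (cp, url, version) triples instead of (url, version) pairs; that is what lets bin/euscan print a compact "category/package-version: url" line in quiet mode. Any caller has to unpack three values now; a hedged sketch (the query string is made up):

    from euscan.scan import scan_upstream

    ret = scan_upstream('app-portage/euscan')      # hypothetical query -> [(cp, url, version), ...]
    for cp, url, version in ret:
        print('%s-%s %s' % (cp, version, url))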
setup.py (7 changed lines)

@@ -21,7 +21,7 @@ cwd = os.getcwd()
 try:
     from portage.const import EPREFIX
 except ImportError:
-    EPREFIX='/'
+    EPREFIX=''
 
 # Python files that need `__version__ = ""` subbed, relative to this dir:
 python_scripts = [os.path.join(cwd, path) for path in (

@@ -72,15 +72,16 @@ core.setup(
     maintainer='Corentin Chary',
     maintainer_email='corentin.chary@gmail.com',
     url='http://euscan.iksaif.net',
-    download_url='http://git.iksaif.net/?p=euscan.git;a=snapshot;h=HEAD;sf=tgz',
+    download_url='https://github.com/iksaif/euscan/tarball/' + ('master' if __version__ == '9999' else ('euscan-%s' % __version__)),
     package_dir={'': 'pym'},
     packages=packages,
     package_data = {},
     scripts=python_scripts,
     data_files=(
-        (os.path.join(EPREFIX, 'usr/share/man/man1'), glob('man/*')),
+        (os.path.join(os.sep, EPREFIX.lstrip(os.sep), 'usr/share/man/man1'), glob('man/*')),
     ),
     cmdclass={
         'set_version': set_version,
     },
 )
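The data_files change works together with the new EPREFIX='' fallback: os.path.join() drops everything before an absolute component and returns a relative path when the first component is empty, so the old expression would have produced a relative man-page path on non-Prefix systems. The new form always yields an absolute path rooted at EPREFIX. A short illustration of the behaviour (the Prefix value is only an example):

    import os

    for EPREFIX in ('', '/', '/gentoo/prefix'):
        print(os.path.join(os.sep, EPREFIX.lstrip(os.sep), 'usr/share/man/man1'))
    # '' and '/'        -> /usr/share/man/man1
    # '/gentoo/prefix'  -> /gentoo/prefix/usr/share/man/man1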