import json
import urllib2
import re

import portage

from euscan import helpers, output, mangling

HANDLER_NAME = "github"
CONFIDENCE = 100
PRIORITY = 90
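# Assumed semantics (not stated in this module): euscan consults handlers in
# PRIORITY order, and CONFIDENCE is attached to every version this handler
# reports back.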


def can_handle(pkg, url=None):
    return url and url.startswith('mirror://github/')
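
# Illustrative example (assumed SRC_URI shape): a URL such as
# 'mirror://github/user/project/project-1.0.tar.gz' is claimed by this
# handler, while a plain 'http://github.com/...' URL is not.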


def guess_package(cp, url):
    match = re.search('^mirror://github/(.*?)/(.*?)/(.*)$', url)

    assert(match)

    return (match.group(1), match.group(2), match.group(3))
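
# Illustrative example (hypothetical arguments):
#   guess_package('dev-util/foo-1.0',
#                 'mirror://github/user/project/project-1.0.tar.gz')
#   returns ('user', 'project', 'project-1.0.tar.gz')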


def scan_url(pkg, url, options):
    'http://developer.github.com/v3/repos/downloads/'

    user, project, filename = guess_package(pkg.cpv, url)

    # find out where version is expected to be found
    cp, ver, rev = portage.pkgsplit(pkg.cpv)
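    # pkgsplit() breaks the cpv into (category/package, version, revision),
    # e.g. 'dev-util/foo-1.0-r1' -> ('dev-util/foo', '1.0', 'r1') (illustrative).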
    if ver not in filename:
        return

    # now create a filename-matching regexp
    # XXX: supposedly replace first with (?P<foo>...)
    # and remaining ones with (?P=foo)
    fnre = re.compile('^%s$' % \
        re.escape(filename).replace(re.escape(ver), '(.*?)'))
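
    # One way the XXX note above could be handled (illustrative sketch only,
    # not part of the original handler): tag the first occurrence of the
    # version with a named group and make later occurrences repeat it:
    #   escaped = re.escape(filename).replace(re.escape(ver), '(?P<ver>.*?)', 1)
    #   escaped = escaped.replace(re.escape(ver), '(?P=ver)')
    #   fnre = re.compile('^%s$' % escaped)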

    output.einfo("Using github API for: project=%s user=%s filename=%s" % \
        (project, user, filename))

    dlreq = urllib2.urlopen('https://api.github.com/repos/%s/%s/downloads' % \
        (user, project))
    dls = json.load(dlreq)
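
    # Each element of dls is assumed to look roughly like this (illustrative,
    # trimmed to the two fields used below):
    #   {"name": "project-1.0.tar.gz",
    #    "html_url": "https://github.com/downloads/user/project/project-1.0.tar.gz"}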

    ret = []
    for dl in dls:
        m = fnre.match(dl['name'])

        if m:
            pv = mangling.mangle_version(m.group(1), options)
            if helpers.version_filtered(cp, ver, pv):
                continue

            url = mangling.mangle_url(dl['html_url'], options)
            ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret