Compare commits
No commits in common. "b2cd013b091f990e139b6ec3b53db92e7d678eea" and "9465c143427b630da38b0cb8c614164dbb8f350c" have entirely different histories.
b2cd013b09 ... 9465c14342
@@ -16,8 +16,7 @@ description = "Ebuild upstream scan utility."
 license = {text = "GPL-2.0"}
 dependencies = [
     "portage",
-    "beautifulsoup4>=4.8.2",
-    "packaging"
+    "beautifulsoup4>=4.8.2"
 ]
 dynamic = ["version"]
 
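The only dependency change above drops "packaging" along with the trailing-comma form of the beautifulsoup4 entry; the newer side needs "packaging" because its PyPI handler (further down in this diff) sorts release strings with packaging.version.parse. A minimal sketch of that PEP 440-aware sort, with made-up version strings:

# Sketch: why "packaging" is listed as a dependency on the newer side.
# The version strings here are illustrative, not taken from any real package.
from packaging.version import parse

versions = ["1.10.0", "1.9.2", "1.9.10", "2.0.0rc1"]
versions.sort(key=parse, reverse=True)  # newest first, unlike a plain string sort
print(versions)  # ['2.0.0rc1', '1.10.0', '1.9.10', '1.9.2']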
@@ -53,11 +53,6 @@ BLACKLIST_PACKAGES = [
 SCANDIR_BLACKLIST_URLS = [
     "mirror://rubygems/(.*)",  # Not browsable
     "mirror://gentoo/(.*)",  # Directory too big
-    "https://dev.gentoo.org/(.*)",  # There shouldn't be releases here
-    # Waste of time to go through
-    "https://crates.io/(.*)",
-    "https://api.nuget.org/(.*)",
-    "https://myget.org/(.*)",
 ]
 
 BRUTEFORCE_BLACKLIST_PACKAGES = [
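The entries above are regular expressions matched against upstream URLs so that directory scanning can be skipped for them. A small sketch of how such a blacklist check can be applied; the helper name and the sample URLs are illustrative, not code from this repository:

import re

SCANDIR_BLACKLIST_URLS = [
    "mirror://rubygems/(.*)",  # Not browsable
    "mirror://gentoo/(.*)",  # Directory too big
]


def is_scandir_blacklisted(url):
    # True when any blacklist pattern matches the candidate URL.
    return any(re.match(pattern, url) for pattern in SCANDIR_BLACKLIST_URLS)


print(is_scandir_blacklisted("mirror://gentoo/distfiles/foo-1.0.tar.gz"))  # True
print(is_scandir_blacklisted("https://example.org/foo-1.0.tar.gz"))  # False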
@@ -1,70 +0,0 @@
-# Copyright 2020-2024 src_prepare group
-# Distributed under the terms of the GNU General Public License v2
-
-import json
-import re
-
-import portage
-
-from euscan import helpers, mangling, output
-
-HANDLER_NAME = "gitea"
-CONFIDENCE = 100
-PRIORITY = 90
-
-# Forgejo strives to be compatible with Gitea API
-# https://forgejo.org/2024-02-forking-forward/
-
-_gitea_instances = [
-    "codeberg.org",
-    "git.osgeo.org",
-    "gitea.com",
-    "gitea.ladish.org",
-    "gitea.osmocom.org",
-    "gitea.treehouse.systems",
-]
-
-gitea_patterns = [
-    re.compile(rf"https://(?P<domain>{domain})/(?P<repository>[^/]+/[^/]+)")
-    for domain in _gitea_instances
-]
-
-
-def can_handle(pkg, url=None):
-    return url and any([re.search(pattern, url) for pattern in gitea_patterns])
-
-
-def scan_url(pkg, url, options):
-    "https://docs.gitea.com/api/1.20/#tag/repository/operation/repoListReleases"
-
-    match = [
-        re.search(pattern, url)
-        for pattern in gitea_patterns
-        if re.search(pattern, url) is not None
-    ][0]
-
-    domain = match.group("domain")
-    repository = match.group("repository")
-
-    output.einfo(f"Using Gitea API in {domain}: {repository}")
-
-    request = helpers.urlopen(f"https://{domain}/api/v1/repos/{repository}/releases")
-
-    data = json.load(request)
-
-    versions = [release["tag_name"] for release in data]
-
-    cp, ver, rev = portage.pkgsplit(pkg.cpv)
-
-    ret = []
-    for up_pv in versions:
-        pv = mangling.mangle_version(up_pv, options)
-        if helpers.version_filtered(cp, ver, pv):
-            continue
-        urls = " ".join(
-            mangling.mangle_url(release["tarball_url"], options)
-            for release in data
-            if release["tag_name"] == up_pv
-        )
-        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
-    return ret
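The deleted handler above walks the Gitea/Forgejo releases endpoint and turns each entry's tag_name/tarball_url pair into a version candidate. A standalone sketch of that API call with plain urllib; the instance and repository path are illustrative examples, and the request needs network access:

import json
import urllib.request

# Example Gitea/Forgejo instance and repository; any pair matched by the
# gitea_patterns above would do.
domain = "codeberg.org"
repository = "forgejo/forgejo"

with urllib.request.urlopen(
    f"https://{domain}/api/v1/repos/{repository}/releases"
) as response:
    releases = json.load(response)

for release in releases:
    # The handler mapped these two fields to (url, version) candidates.
    print(release["tag_name"], release["tarball_url"])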
@@ -1,82 +0,0 @@
-# Copyright 2020-2024 src_prepare group
-# Distributed under the terms of the GNU General Public License v2
-
-import json
-import re
-
-import portage
-
-from euscan import helpers, mangling, output
-
-HANDLER_NAME = "gitlab"
-CONFIDENCE = 100
-PRIORITY = 90
-
-_gitlab_instances = [
-    "gitlab.com",
-    "gitlab.freedesktop.org",
-    "invent.kde.org/",
-    "gitlab.gnome.org",
-    "gitlab.kitware.com",
-    "gitlab.xfce.org",
-    "code.videolan.org",
-    "gitlab.xiph.org",
-]
-
-gitlab_patterns = [
-    # Regular expression adapted from pkgcheck
-    # https://docs.gitlab.com/ee/user/reserved_names.html
-    re.compile(
-        rf"https://(?P<domain>{domain})/(?P<repository>((?!api/)\w[^/]*/)+(?!raw/)\w[^/]*)"
-    )
-    for domain in _gitlab_instances
-]
-
-
-def can_handle(pkg, url=None):
-    return url and any([re.search(pattern, url) for pattern in gitlab_patterns])
-
-
-def scan_url(pkg, url, options):
-    "https://docs.gitlab.com/ee/api/releases/index.html"
-
-    match = [
-        re.search(pattern, url)
-        for pattern in gitlab_patterns
-        if re.search(pattern, url) is not None
-    ][0]
-
-    domain = match.group("domain")
-    repository = match.group("repository")
-
-    output.einfo(f"Using GitLab REST API in {domain}: {repository}")
-
-    request = helpers.urlopen(
-        f"https://{domain}/api/v4/projects/{repository.replace('/', '%2F')}/releases"
-    )
-
-    data = json.load(request)
-
-    versions = [release["tag_name"] for release in data]
-
-    cp, ver, rev = portage.pkgsplit(pkg.cpv)
-
-    ret = []
-    for up_pv in versions:
-        pv = mangling.mangle_version(up_pv, options)
-        if helpers.version_filtered(cp, ver, pv):
-            continue
-        urls = " ".join(
-            [
-                mangling.mangle_url(source["url"], options)
-                for source in [
-                    release["assets"]["sources"]
-                    for release in data
-                    if release["tag_name"] == up_pv
-                ][0]
-                # prefer tar.bz2
-                if source["format"] == "tar.bz2"
-            ]
-        )
-        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
-    return ret
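The GitLab handler deleted above calls the Releases API with a URL-encoded project path and keeps only the tar.bz2 source asset for each matching tag. A standalone sketch of the same request; the instance and project path are illustrative examples:

import json
import urllib.parse
import urllib.request

# Example instance and project; any pair matched by gitlab_patterns above would do.
domain = "gitlab.com"
repository = "gitlab-org/gitlab-runner"

# The API expects "owner/name" percent-encoded as a single path segment.
project = urllib.parse.quote(repository, safe="")

with urllib.request.urlopen(
    f"https://{domain}/api/v4/projects/{project}/releases"
) as response:
    releases = json.load(response)

for release in releases:
    # assets["sources"] lists one archive per format; the handler preferred tar.bz2.
    sources = {s["format"]: s["url"] for s in release["assets"]["sources"]}
    print(release["tag_name"], sources.get("tar.bz2"))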
@@ -20,7 +20,7 @@ HANDLER_NAME = "gnome"
 CONFIDENCE = 100
 PRIORITY = 90
 
-GNOME_URL_SOURCE = "https://download.gnome.org/sources"
+GNOME_URL_SOURCE = "http://ftp.gnome.org/pub/GNOME/sources"
 
 
 def can_handle(_pkg, url=None):
@@ -38,7 +38,7 @@ def guess_package(cp, url):
 
 
 def scan_url(pkg, url, options):
-    "https://download.gnome.org/sources/"
+    "http://ftp.gnome.org/pub/GNOME/sources/"
     package = {
         "data": guess_package(pkg.cpv, url),
         "type": "gnome",
@@ -55,7 +55,7 @@ def scan_pkg(pkg, options):
     content = fp.read()
     fp.close()
 
-    cache = json.loads(content)
+    cache = json.loads(content, encoding="ascii")
 
     if cache[0] != 4:
         output.eerror("Unknow cache format detected")
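Both sides of the gnome hunks read the per-package cache.json index; the changes only swap the base URL and drop the encoding= argument that modern json.loads no longer accepts. A minimal sketch of fetching and sanity-checking that index, mirroring the cache[0] != 4 guard above; "glib" is just an example module, and the layout of the entries beyond cache[0] is an assumption rather than something this diff shows:

import json
import urllib.request

GNOME_URL_SOURCE = "https://download.gnome.org/sources"
package = "glib"  # illustrative module name

with urllib.request.urlopen(f"{GNOME_URL_SOURCE}/{package}/cache.json") as fp:
    cache = json.loads(fp.read())

if cache[0] != 4:
    raise RuntimeError("Unknown cache format")

# Assumption: under format 4, cache[2] maps a module name to its available versions.
print(cache[2].get(package, [])[:5])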
@@ -1,13 +1,11 @@
 # Copyright 2011 Corentin Chary <corentin.chary@gmail.com>
-# Copyright 2020-2024 src_prepare group
+# Copyright 2020-2023 src_prepare group
 # Distributed under the terms of the GNU General Public License v2
 
-import json
 import re
-import urllib.error
+import xmlrpc.client
 
 import portage
-from packaging.version import parse
 
 from euscan import helpers, mangling, output
 
@@ -31,7 +29,7 @@ def guess_package(cp, url):
 
 
 def scan_url(pkg, url, options):
-    "https://peps.python.org/pep-0691/"
+    "http://wiki.python.org/moin/PyPiXmlRpc"
 
     package = guess_package(pkg.cpv, url)
     return scan_pkg(pkg, {"data": package})
@@ -40,23 +38,15 @@ def scan_url(pkg, url, options):
 def scan_pkg(pkg, options):
     package = options["data"]
 
-    output.einfo("Using PyPi JSON API: " + package)
+    output.einfo("Using PyPi XMLRPC: " + package)
 
-    try:
-        fp = helpers.urlopen(f"https://pypi.org/pypi/{package}/json/")
-    except urllib.error.URLError:
-        return []
-    except OSError:
-        return []
+    client = xmlrpc.client.ServerProxy("https://pypi.python.org/pypi")
+    versions = client.package_releases(package)
 
-    if not fp:
-        return []
+    if not versions:
+        return versions
 
-    data = json.loads(fp.read())
-
-    versions = list(data["releases"].keys())
-
-    versions.sort(key=parse, reverse=True)
+    versions.reverse()
 
     cp, ver, rev = portage.pkgsplit(pkg.cpv)
 
@@ -65,12 +55,7 @@ def scan_pkg(pkg, options):
         pv = mangling.mangle_version(up_pv, options)
         if helpers.version_filtered(cp, ver, pv):
             continue
-        urls = " ".join(
-            [
-                mangling.mangle_url(file["url"], options)
-                for file in data["releases"][up_pv]
-                if file["packagetype"] == "sdist"
-            ]
-        )
+        urls = client.release_urls(package, up_pv)
+        urls = " ".join([mangling.mangle_url(infos["url"], options) for infos in urls])
         ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
     return ret
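Taken together, the pypi hunks swap a JSON-API implementation (removed lines) for the older XMLRPC client (added lines): pypi.org/pypi/<package>/json plus an sdist filter versus ServerProxy.package_releases/release_urls, an API PyPI has deprecated. A standalone sketch of the JSON-API lookup shown on the removed side; the package name is an illustrative example and the request needs network access:

import json
import urllib.request

from packaging.version import parse

package = "beautifulsoup4"  # illustrative package name

with urllib.request.urlopen(f"https://pypi.org/pypi/{package}/json") as fp:
    data = json.loads(fp.read())

# Newest first, as in the removed handler code.
versions = sorted(data["releases"], key=parse, reverse=True)

# Keep only sdist files for the newest release, matching the packagetype filter.
sdists = [
    f["url"] for f in data["releases"][versions[0]] if f["packagetype"] == "sdist"
]
print(versions[0], sdists)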
@@ -153,9 +153,6 @@ def scan_upstream(query, on_progress=None):
     else:
         uris = pkg.environment("SRC_URI")
 
-    # Roundabout way to handle $'' strings
-    uris = uris.encode("raw_unicode_escape").decode("unicode_escape")
-
     cpv = pkg.cpv
 
     uris = parse_src_uri(uris)
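The lines removed in the last hunk exist to cope with $''-quoted values that can show up in the SRC_URI environment dump: encoding with raw_unicode_escape keeps the literal backslash sequences, and decoding with unicode_escape then interprets them. A small self-contained illustration; the sample value is made up:

# Round trip used by the removed lines above; the SRC_URI sample is made up.
uris = r"https://example.org/foo-1.0.tar.gz\nhttps://example.org/bar-1.0.tar.gz"

# raw_unicode_escape preserves the literal backslash-n; unicode_escape then
# turns it into a real newline, so the string splits into separate URIs.
decoded = uris.encode("raw_unicode_escape").decode("unicode_escape")
print(decoded.split())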