Compare commits

7 commits: 9465c14342 ... b2cd013b09

SHA1
b2cd013b09
e9fd94e1a5
e13a62af84
d93c3154ac
9809d9a805
d217c839a9
aad99f71fe
pyproject.toml

@@ -16,7 +16,8 @@ description = "Ebuild upstream scan utility."
 license = {text = "GPL-2.0"}
 dependencies = [
     "portage",
-    "beautifulsoup4>=4.8.2"
+    "beautifulsoup4>=4.8.2",
+    "packaging"
 ]
 dynamic = ["version"]
 
@@ -53,6 +53,11 @@ BLACKLIST_PACKAGES = [
 SCANDIR_BLACKLIST_URLS = [
     "mirror://rubygems/(.*)",  # Not browsable
     "mirror://gentoo/(.*)",  # Directory too big
+    "https://dev.gentoo.org/(.*)",  # There shouldn't be releases here
+    # Waste of time to go through
+    "https://crates.io/(.*)",
+    "https://api.nuget.org/(.*)",
+    "https://myget.org/(.*)",
 ]
 
 BRUTEFORCE_BLACKLIST_PACKAGES = [
src/euscan/handlers/gitea.py (new file, 70 lines)

@@ -0,0 +1,70 @@
+# Copyright 2020-2024 src_prepare group
+# Distributed under the terms of the GNU General Public License v2
+
+import json
+import re
+
+import portage
+
+from euscan import helpers, mangling, output
+
+HANDLER_NAME = "gitea"
+CONFIDENCE = 100
+PRIORITY = 90
+
+# Forgejo strives to be compatible with Gitea API
+# https://forgejo.org/2024-02-forking-forward/
+
+_gitea_instances = [
+    "codeberg.org",
+    "git.osgeo.org",
+    "gitea.com",
+    "gitea.ladish.org",
+    "gitea.osmocom.org",
+    "gitea.treehouse.systems",
+]
+
+gitea_patterns = [
+    re.compile(rf"https://(?P<domain>{domain})/(?P<repository>[^/]+/[^/]+)")
+    for domain in _gitea_instances
+]
+
+
+def can_handle(pkg, url=None):
+    return url and any([re.search(pattern, url) for pattern in gitea_patterns])
+
+
+def scan_url(pkg, url, options):
+    "https://docs.gitea.com/api/1.20/#tag/repository/operation/repoListReleases"
+
+    match = [
+        re.search(pattern, url)
+        for pattern in gitea_patterns
+        if re.search(pattern, url) is not None
+    ][0]
+
+    domain = match.group("domain")
+    repository = match.group("repository")
+
+    output.einfo(f"Using Gitea API in {domain}: {repository}")
+
+    request = helpers.urlopen(f"https://{domain}/api/v1/repos/{repository}/releases")
+
+    data = json.load(request)
+
+    versions = [release["tag_name"] for release in data]
+
+    cp, ver, rev = portage.pkgsplit(pkg.cpv)
+
+    ret = []
+    for up_pv in versions:
+        pv = mangling.mangle_version(up_pv, options)
+        if helpers.version_filtered(cp, ver, pv):
+            continue
+        urls = " ".join(
+            mangling.mangle_url(release["tarball_url"], options)
+            for release in data
+            if release["tag_name"] == up_pv
+        )
+        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
+    return ret
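For context, the releases endpoint this handler queries returns a JSON array in which each object carries the tag_name and tarball_url fields read above. A minimal standalone sketch of the same call, assuming a reachable repository (the forgejo/forgejo path is only an example):

    import json
    import urllib.request

    # Example repository path; any Gitea/Forgejo project with releases works.
    url = "https://codeberg.org/api/v1/repos/forgejo/forgejo/releases"
    with urllib.request.urlopen(url) as response:
        releases = json.load(response)

    for release in releases:
        # The handler keys on exactly these two fields per release.
        print(release["tag_name"], release["tarball_url"])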
src/euscan/handlers/gitlab.py (new file, 82 lines)

@@ -0,0 +1,82 @@
+# Copyright 2020-2024 src_prepare group
+# Distributed under the terms of the GNU General Public License v2
+
+import json
+import re
+
+import portage
+
+from euscan import helpers, mangling, output
+
+HANDLER_NAME = "gitlab"
+CONFIDENCE = 100
+PRIORITY = 90
+
+_gitlab_instances = [
+    "gitlab.com",
+    "gitlab.freedesktop.org",
+    "invent.kde.org/",
+    "gitlab.gnome.org",
+    "gitlab.kitware.com",
+    "gitlab.xfce.org",
+    "code.videolan.org",
+    "gitlab.xiph.org",
+]
+
+gitlab_patterns = [
+    # Regular expression adapted from pkgcheck
+    # https://docs.gitlab.com/ee/user/reserved_names.html
+    re.compile(
+        rf"https://(?P<domain>{domain})/(?P<repository>((?!api/)\w[^/]*/)+(?!raw/)\w[^/]*)"
+    )
+    for domain in _gitlab_instances
+]
+
+
+def can_handle(pkg, url=None):
+    return url and any([re.search(pattern, url) for pattern in gitlab_patterns])
+
+
+def scan_url(pkg, url, options):
+    "https://docs.gitlab.com/ee/api/releases/index.html"
+
+    match = [
+        re.search(pattern, url)
+        for pattern in gitlab_patterns
+        if re.search(pattern, url) is not None
+    ][0]
+
+    domain = match.group("domain")
+    repository = match.group("repository")
+
+    output.einfo(f"Using GitLab REST API in {domain}: {repository}")
+
+    request = helpers.urlopen(
+        f"https://{domain}/api/v4/projects/{repository.replace('/', '%2F')}/releases"
+    )
+
+    data = json.load(request)
+
+    versions = [release["tag_name"] for release in data]
+
+    cp, ver, rev = portage.pkgsplit(pkg.cpv)
+
+    ret = []
+    for up_pv in versions:
+        pv = mangling.mangle_version(up_pv, options)
+        if helpers.version_filtered(cp, ver, pv):
+            continue
+        urls = " ".join(
+            [
+                mangling.mangle_url(source["url"], options)
+                for source in [
+                    release["assets"]["sources"]
+                    for release in data
+                    if release["tag_name"] == up_pv
+                ][0]
+                # prefer tar.bz2
+                if source["format"] == "tar.bz2"
+            ]
+        )
+        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
+    return ret
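One detail worth calling out: GitLab's REST API addresses a project by its URL-encoded path, which is why the handler substitutes %2F for the slashes in repository. A sketch of the equivalent encoding via the standard library (the repository path is made up):

    import urllib.parse

    repository = "xfce/xfwm4"  # made-up project path
    # quote() with safe="" percent-encodes the slash, matching the
    # repository.replace('/', '%2F') call in the handler above.
    print(urllib.parse.quote(repository, safe=""))  # xfce%2Fxfwm4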
src/euscan/handlers/gnome.py

@@ -20,7 +20,7 @@ HANDLER_NAME = "gnome"
 CONFIDENCE = 100
 PRIORITY = 90
 
-GNOME_URL_SOURCE = "http://ftp.gnome.org/pub/GNOME/sources"
+GNOME_URL_SOURCE = "https://download.gnome.org/sources"
 
 
 def can_handle(_pkg, url=None):
@@ -38,7 +38,7 @@ def guess_package(cp, url):
 
 
 def scan_url(pkg, url, options):
-    "http://ftp.gnome.org/pub/GNOME/sources/"
+    "https://download.gnome.org/sources/"
     package = {
         "data": guess_package(pkg.cpv, url),
         "type": "gnome",
@@ -55,7 +55,7 @@ def scan_pkg(pkg, options):
     content = fp.read()
     fp.close()
 
-    cache = json.loads(content, encoding="ascii")
+    cache = json.loads(content)
 
     if cache[0] != 4:
         output.eerror("Unknow cache format detected")
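The json.loads() change tracks Python 3.9, which removed the long-deprecated encoding keyword; the decoder detects the encoding of bytes input on its own. A minimal illustration:

    import json

    # On Python 3.9+, json.loads(data, encoding="ascii") raises TypeError;
    # plain json.loads() handles bytes input directly.
    cache = json.loads(b'[4, {"sample": "0.1"}]')
    print(cache[0])  # 4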
src/euscan/handlers/pypi.py

@@ -1,11 +1,13 @@
 # Copyright 2011 Corentin Chary <corentin.chary@gmail.com>
-# Copyright 2020-2023 src_prepare group
+# Copyright 2020-2024 src_prepare group
 # Distributed under the terms of the GNU General Public License v2
 
+import json
 import re
-import xmlrpc.client
+import urllib.error
 
 import portage
+from packaging.version import parse
 
 from euscan import helpers, mangling, output
 
@@ -29,7 +31,7 @@ def guess_package(cp, url):
 
 
 def scan_url(pkg, url, options):
-    "http://wiki.python.org/moin/PyPiXmlRpc"
+    "https://peps.python.org/pep-0691/"
 
     package = guess_package(pkg.cpv, url)
     return scan_pkg(pkg, {"data": package})
@@ -38,15 +40,23 @@ def scan_url(pkg, url, options):
 def scan_pkg(pkg, options):
     package = options["data"]
 
-    output.einfo("Using PyPi XMLRPC: " + package)
+    output.einfo("Using PyPi JSON API: " + package)
 
-    client = xmlrpc.client.ServerProxy("https://pypi.python.org/pypi")
-    versions = client.package_releases(package)
+    try:
+        fp = helpers.urlopen(f"https://pypi.org/pypi/{package}/json/")
+    except urllib.error.URLError:
+        return []
+    except OSError:
+        return []
 
-    if not versions:
-        return versions
+    if not fp:
+        return []
 
-    versions.reverse()
+    data = json.loads(fp.read())
+
+    versions = list(data["releases"].keys())
+
+    versions.sort(key=parse, reverse=True)
 
     cp, ver, rev = portage.pkgsplit(pkg.cpv)
 
@@ -55,7 +65,12 @@ def scan_pkg(pkg, options):
         pv = mangling.mangle_version(up_pv, options)
         if helpers.version_filtered(cp, ver, pv):
             continue
-        urls = client.release_urls(package, up_pv)
-        urls = " ".join([mangling.mangle_url(infos["url"], options) for infos in urls])
+        urls = " ".join(
+            [
+                mangling.mangle_url(file["url"], options)
+                for file in data["releases"][up_pv]
+                if file["packagetype"] == "sdist"
+            ]
+        )
         ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
     return ret
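For reference, a standalone sketch of the pypi.org JSON payload the rewritten scan_pkg() consumes: releases maps version strings to lists of uploaded files, and packaging's parse() supplies the PEP 440 ordering used for sorting. The package name here is only an example:

    import json
    import urllib.request

    from packaging.version import parse

    # Example project; any package on pypi.org works.
    with urllib.request.urlopen("https://pypi.org/pypi/requests/json") as fp:
        data = json.loads(fp.read())

    # "releases" maps each version string to its uploaded files.
    versions = sorted(data["releases"], key=parse, reverse=True)

    for file in data["releases"][versions[0]]:
        if file["packagetype"] == "sdist":  # source distributions only
            print(file["url"])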
@@ -153,6 +153,9 @@ def scan_upstream(query, on_progress=None):
     else:
         uris = pkg.environment("SRC_URI")
+
+    # Roundabout way to handle $'' strings
+    uris = uris.encode("raw_unicode_escape").decode("unicode_escape")
 
     cpv = pkg.cpv
 
     uris = parse_src_uri(uris)
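The added round trip exists because, per the comment, SRC_URI can come back with bash $'' (ANSI-C) quoting, so escape sequences arrive as literal backslash text. A small demonstration of the decoding step with an invented URI:

    # Invented URI carrying a literal \x20 escape, as a $'' string would.
    uris = r"https://example.org/foo\x20bar-1.0.tar.gz"

    # raw_unicode_escape keeps the backslash sequence intact as bytes;
    # unicode_escape then interprets \x20 as a space.
    print(uris.encode("raw_unicode_escape").decode("unicode_escape"))
    # https://example.org/foo bar-1.0.tar.gz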