2012-07-18 14:02:26 +02:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
import os
|
2012-07-22 10:26:40 +02:00
|
|
|
import sys
|
2012-07-18 14:02:26 +02:00
|
|
|
import re
|
|
|
|
import urllib
|
|
|
|
from tempfile import mkstemp
|
|
|
|
import tarfile
|
2012-07-21 09:22:26 +02:00
|
|
|
import gzip
|
2012-07-18 14:02:26 +02:00
|
|
|
import logging
|
|
|
|
import shutil
|
2012-07-25 13:49:47 +02:00
|
|
|
from difflib import unified_diff
|
2012-07-18 14:02:26 +02:00
|
|
|
|
2012-07-24 15:02:36 +02:00
|
|
|
from portage.exception import AmbiguousPackageName
|
2012-07-18 14:02:26 +02:00
|
|
|
from gentoolkit.query import Query
|
|
|
|
from BeautifulSoup import BeautifulSoup, SoupStrainer
|
|
|
|
|
|
|
|
|
|
|
|
# Module-level logger; handlers/level are configured in main() via basicConfig().
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
# From portage-janitor
|
|
|
|
def guess_indent_values(before):
    """Infer the indentation style used by a metadata.xml body.

    Probes *before* for known first-level tags (herd, maintainer, ...) and
    second-level tags (watch, name, email) at common space/tab widths.

    Returns a pair ``(rindent_str, indent_str)``: the whitespace prefix for
    first-level tags and for second-level tags, falling back to two/four
    spaces when nothing matches.
    """

    def detect(tags):
        # Try each tag at the usual space widths first, then tab widths.
        # Returns (width, uses_tabs); (-1, False) when no tag is found.
        for tag in tags:
            for width in (0, 2, 4, 6, 8, 12, 16):
                if "\n%s<%s" % (" " * width, tag) in before:
                    return width, False
            for width in (0, 1, 2):
                if "\n%s<%s" % ("\t" * width, tag) in before:
                    return width, True
        return -1, False

    rindent, tab = detect(["herd", "maintainer", "longdescription", "use", "upstream"])
    if rindent == -1:
        rindent = 2  # default: two spaces for first-level tags
    rindent_str = ("\t" if tab else " ") * rindent

    indent, tab = detect(["watch", "name", "email"])
    if indent == -1:
        # No second-level tag seen: derive from the first-level indent.
        indent = rindent * 2 if rindent else 4
        if rindent and rindent_str == "\t":
            tab = True
    indent_str = ("\t" if tab else " ") * indent

    return rindent_str, indent_str
|
|
|
|
|
|
|
|
|
|
|
|
def get_watch_data(package):
    """Fetch the debian/watch contents for *package*.

    Dispatches on the kind of Debian artifact located for the package:
    a full source tarball ("source") or a .diff.gz ("diff").
    Returns the watch file text, or None when nothing usable is found.
    """
    deb_url, deb_type = get_deb_url(package.name)

    handlers = {"source": handle_source, "diff": handle_diff}
    handler = handlers.get(deb_type)
    if handler is not None:
        return handler(deb_url)
|
2012-07-18 14:02:26 +02:00
|
|
|
|
2012-07-21 09:22:26 +02:00
|
|
|
|
|
|
|
def handle_diff(deb_url):
    """Download a Debian .diff.gz and extract the debian/watch hunk.

    Scans the gzipped unified diff for a hunk that adds debian/watch and
    collects its added ("+") lines, stripped of the leading "+".

    Returns the collected text (empty string when the diff does not touch
    debian/watch).
    """
    _, temp_deb = mkstemp()

    logger.info(" Downloading debian diff %s...", deb_url)
    urllib.urlretrieve(deb_url, temp_deb)

    watch_data = ""

    # Fix: close the gzip handle and remove the temp file even when
    # parsing raises; previously both leaked on any exception.
    try:
        fp = gzip.open(temp_deb, "rb")
        try:
            for line in fp:
                if re.match(r"\+\+\+ .+?/debian/watch", line):
                    fp.readline()  # diff lines, don't care
                    cur_line = fp.readline()
                    while cur_line.startswith("+"):
                        watch_data += cur_line[1:]
                        cur_line = fp.readline()
        finally:
            fp.close()
    finally:
        os.unlink(temp_deb)

    return watch_data
|
|
|
|
|
|
|
|
|
|
|
|
def handle_source(deb_url):
    """Download a Debian source tarball and return its debian/watch content.

    Extracts debian/watch next to the downloaded temp file, reads it, and
    cleans up the extracted tree and the download.

    Returns the watch file text, or None when the tarball has no
    debian/watch member.
    """
    _, temp_deb = mkstemp()
    temp_dir = os.path.dirname(temp_deb)

    logger.info(" Downloading debian source %s...", deb_url)
    urllib.urlretrieve(deb_url, temp_deb)

    watch_data = None

    # Fix: close the tarfile and the extracted watch file, and remove the
    # temp download even on error; previously all three could leak.
    try:
        tar = tarfile.open(temp_deb)
        try:
            try:
                tar.extract("debian/watch", temp_dir)
            except KeyError:
                # Archive has no debian/watch member; report None.
                pass
            else:
                debian_path = os.path.join(temp_dir, "debian")
                watch_path = os.path.join(debian_path, "watch")
                fp = open(watch_path)
                try:
                    watch_data = fp.read()
                finally:
                    fp.close()
                shutil.rmtree(debian_path)
        finally:
            tar.close()
    finally:
        os.unlink(temp_deb)

    return watch_data
|
|
|
|
|
|
|
|
|
|
|
|
def get_deb_url(name):
    """Locate the Debian unstable source artifact for package *name*.

    Scrapes packages.debian.org for a link to either a
    ``*.debian.tar.{gz,bz2}`` (deb_type "source") or a ``*.diff.gz``
    (deb_type "diff"). When nothing is found, prompts interactively for
    another Debian package name and retries.

    Returns (deb_url, deb_type).
    """
    deb_url = None
    deb_type = None

    while not deb_url:
        url = "http://packages.debian.org/source/unstable/%s" % name
        opened = urllib.urlopen(url)

        content = opened.read()
        # Fix: close the HTTP handle once the page is read (was leaked).
        opened.close()

        for link in BeautifulSoup(content, parseOnlyThese=SoupStrainer("a")):
            # Fix: raw strings for the regexes — "\s"/"\." in plain strings
            # are invalid escapes (SyntaxWarning on modern Pythons).
            if re.match(r"[^\s]+\.debian\.tar\.(?:gz|bz2)", link.text):
                deb_url = link["href"]
                deb_type = "source"
                break
            if re.match(r"[^\s]+\.diff\.gz", link.text):
                deb_url = link["href"]
                deb_type = "diff"
                break

        if not deb_url:
            # Fix: lazy %-args instead of eager interpolation in logging.
            logger.error(" Cannot get package from %s", url)
            name = raw_input(" Package name in Debian: ")

    return deb_url, deb_type
|
2012-07-18 14:02:26 +02:00
|
|
|
|
|
|
|
|
2012-07-25 13:49:47 +02:00
|
|
|
def patch_metadata(package, watch_data, diff=False):
    """Insert <watch> tags built from *watch_data* into a package's metadata.xml.

    package: a gentoolkit package object; its ``metadata.metadata_path`` and
    ``category`` attributes are used.
    watch_data: raw text of a debian/watch file.
    diff: when true, return a unified diff against the original file instead
    of the full patched content.
    """
    logger.info(" Patching metadata file")

    metadata_path = package.metadata.metadata_path

    with open(metadata_path) as fp:
        original = fp.read()
    # Match the file's existing indentation so inserted tags blend in.
    rindent, indent = guess_indent_values(original)
    data = original

    # clean watch_data
    watch_data = "\n".join(
        [line for line in watch_data.split("\n") if not line.startswith("#")]
    )  # comments

    # Join continuation lines so each watch entry is a single logical line.
    watch_data = watch_data.replace("\\\n", "")  # remove backslashes

    watch_tags = []

    for watch_line in watch_data.split("\n"):  # there can be multiple lines
        watch_line = " ".join(watch_line.split())  # remove extra spaces and \n

        # A "version=N" line sets the format version for subsequent entries.
        version_parse = re.match("version=(\d+?)", watch_line)
        if version_parse:
            version = version_parse.group(1)
            continue

        if not watch_line:  # skip empty lines
            continue

        # parse watch_line
        # Groups: (1) quoted opts, (2) unquoted opts, (3) the rest (URL part).
        result = re.match(r'(?:opts=(?:"([^"]+?)"|([^\s]+?)) )?(.*)', watch_line)

        opts_quote, opts, url = result.groups()
        opts = opts_quote or opts

        if opts:
            # clean opts, skip useless ones
            valid = ("uversionmangle", "versionmangle", "downloadurlmangle")
            cleaned_opts = []
            for opt in opts.split(","):
                opt_name, opt_value = opt.split("=", 1)
                if opt_name in valid:
                    # uversionmangle is normalized to versionmangle.
                    if opt_name == "uversionmangle":
                        opt_name = "versionmangle"
                    cleaned_opts.append('%s="%s"' % (opt_name, opt_value))
            opts = " ".join(cleaned_opts)

        # clean url from useless stuff. Just keep <base> [<filepattern>]
        url_search = re.search(r"^([^\s]+)(?: ([^\s]*\([^\s]+\)[^\s]*))?", url)
        url = " ".join([x for x in url_search.groups() if x is not None])

        # NOTE(review): `version` is only bound by a preceding "version="
        # line; a watch file without one raises NameError here — confirm
        # inputs always start with a version declaration.
        if opts:
            watch_tag = '%s<watch version="%s" %s>%s</watch>' % (
                indent,
                version,
                opts,
                url,
            )
        else:
            watch_tag = '%s<watch version="%s">%s</watch>' % (indent, version, url)
        watch_tags.append(watch_tag)

    watch_tags = "\n".join(watch_tags)

    # Insert into an existing <upstream> block, or create one just before
    # the closing </pkgmetadata> tag.
    if "<upstream>" in data:
        data = data.replace("<upstream>", "<upstream>\n%s" % watch_tags, 1)
    else:
        rep = "%s<upstream>\n%s\n%s</upstream>\n</pkgmetadata>" % (
            rindent,
            watch_tags,
            rindent,
        )
        data = data.replace("</pkgmetadata>", rep, 1)

    if not diff:
        return data
    else:
        # Generate clean a/category/package/metadata.xml path
        n = metadata_path.find(package.category)
        if n != -1:
            metadata_path = metadata_path[n:]
        res = unified_diff(
            original.splitlines(True),
            data.splitlines(True),
            fromfile=os.path.join("a/", metadata_path),
            tofile=os.path.join("b/", metadata_path),
        )
        return "".join([x for x in res])
|
2012-07-18 14:02:26 +02:00
|
|
|
|
|
|
|
|
2012-07-22 10:26:40 +02:00
|
|
|
def process_package(query, diff=False):
    """Resolve *query* to a portage package and return its patched metadata.

    Returns the new metadata.xml content (a unified diff when *diff* is
    true), or None when the name is ambiguous, no package matches, or no
    debian/watch file can be retrieved.
    """
    try:
        matches = Query(query).smart_find(
            in_installed=True,
            in_porttree=True,
            in_overlay=True,
            include_masked=True,
            show_progress=False,
            no_matches_fatal=False,
        )
    except AmbiguousPackageName:
        logger.error(" Ambiguous package name")
        return None

    if not matches:
        logger.error(" Package not found")
        return None

    candidates = sorted(matches)
    package = candidates.pop()
    # Prefer a versioned ebuild over a live (-9999) one when possible.
    if "9999" in package.version and candidates:
        package = candidates.pop()

    watch_data = get_watch_data(package)
    if watch_data is None:
        logger.error(" No watch file found")
        return None

    return patch_metadata(package, watch_data, diff=diff)
|
2012-07-18 14:02:26 +02:00
|
|
|
|
|
|
|
|
|
|
|
def main():
    """Command-line entry point: patch metadata for each named package."""
    import optparse

    parser = optparse.OptionParser(
        usage="usage: %prog <package> [<package> [...]]",
    )
    parser.add_option(
        "-d",
        "--diff",
        action="store_true",
        dest="diff",
        default=False,
        help="Outputs a diff",
    )
    opts, packages = parser.parse_args()

    # Progress/errors go to stderr; patched content goes to stdout.
    logging.basicConfig(stream=sys.stderr, level=logging.INFO, format="%(message)s")

    for package in packages:
        logger.info("Processing %s..." % package)
        output = process_package(package, opts.diff)
        if output:
            sys.stdout.write(output)
|
2012-07-18 14:02:26 +02:00
|
|
|
|
2023-11-14 22:03:53 +01:00
|
|
|
|
2012-07-18 14:02:26 +02:00
|
|
|
if __name__ == "__main__":  # script entry point
    main()
|