Initial python3 compatibility

commit 94c3eeba49 (parent 2fedc7450d)
Author: Bernard Cafarelli
Date: 2019-12-05 17:46:19 +01:00
GPG Key ID: 5A761FC3AEC20F13 (no known key found for this signature in database)
20 changed files with 62 additions and 61 deletions

.gitignore

@@ -1,3 +1,4 @@
+.*.swp
 *~
 *.py[co]
 *.egg-info


@@ -4,7 +4,7 @@
 Distributed under the terms of the GNU General Public License v2
 """
+from __future__ import print_function
 import os
 # Meta
@@ -21,7 +21,7 @@ __description__ = "A tool to detect new upstream releases."
 import sys
 import getopt
 from errno import EINTR, EINVAL
-from httplib import HTTPConnection
+from http.client import HTTPConnection
 from portage import settings
 from portage.output import white, yellow, turquoise, green
@@ -270,7 +270,7 @@ def main():
     on_progress = None
     if CONFIG['progress']:
         on_progress_gen = progress_bar()
-        on_progress = on_progress_gen.next()
+        on_progress = next(on_progress_gen)
         on_progress(maxval=len(queries) * 100, increment=0, label="Working...")
     # Importing stuff here for performance reasons
@@ -322,7 +322,7 @@ def main():
         print("")
     if CONFIG['progress']:
-        on_progress_gen.next()
+        next(on_progress_gen)
         print("\n", file=sys.stderr)
     output.set_query(None)
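
Note on the `.next()` hunks above: the generator method `gen.next()` no longer exists on Python 3, while the `next()` builtin (available since 2.6) works on both. A minimal sketch of the pattern, using a hypothetical stand-in for euscan's progress_bar() generator:

    def progress_bar_sketch():
        """Hypothetical stand-in for euscan's progress_bar() generator."""
        def on_progress(maxval=0, increment=1, label=None):
            pass  # a real implementation would draw the bar here
        yield on_progress

    gen = progress_bar_sketch()
    on_progress = next(gen)   # next() builtin works on 2.6+ and 3.x; gen.next() is 2.x-only
    on_progress(maxval=100, increment=0, label="Working...")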


@@ -5,7 +5,7 @@
 __version__ = "git"
-import ConfigParser
+import configparser
 import os
 from ast import literal_eval
@@ -33,7 +33,7 @@ CONFIG = {
     'handlers-exclude': [],
 }
-config = ConfigParser.ConfigParser()
+config = configparser.ConfigParser()
 config.read(['/etc/euscan.conf', os.path.expanduser('~/.euscan.conf')])
 if config.has_section("euscan"):
     for key, value in config.items("euscan"):
@@ -88,5 +88,5 @@ ROBOTS_TXT_BLACKLIST_DOMAINS = [
     '(.*)festvox\.org(.*)',
 ]
-from out import EuscanOutput
+from euscan.out import EuscanOutput
 output = EuscanOutput(CONFIG)
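
Note: this file picks up two distinct Python 3 changes: the ConfigParser module rename and the removal of implicit relative imports (`from out import ...` must become package-qualified). If dual 2/3 support were wanted, a guarded import is the usual sketch (assumption: the project may still run under Python 2 at this stage):

    try:
        import configparser                      # Python 3 name
    except ImportError:
        import ConfigParser as configparser      # Python 2 fallback

    config = configparser.ConfigParser()
    config.read(['/etc/euscan.conf'])            # read() silently skips missing files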


@@ -68,7 +68,7 @@ def get_metadata(pkg):
             output.einfo('Using custom metadata: %s' % meta_override)
         if not pkg_metadata:
             pkg_metadata = pkg.metadata
-    except Exception, e:
+    except Exception as e:
         output.ewarn('Error when fetching metadata: %s' % str(e))
     if not pkg_metadata:
@@ -183,7 +183,7 @@ def scan(pkg, urls, on_progress=None):
     metadata = get_metadata(pkg)
     versions = []
-    pkg_handlers = find_handlers('package', metadata.keys())
+    pkg_handlers = find_handlers('package', list(metadata.keys()))
     if not pkg_handlers:
         pkg_handler = find_best_handler('package', pkg)
         if pkg_handler:
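
Note: `except Exception, e` is a SyntaxError under Python 3, while `except Exception as e` also parses on 2.6+; and `dict.keys()` returns a view on Python 3, so wrapping it in `list()` keeps whatever `find_handlers` does with the value (indexing, mutation) safe. A tiny illustration of both idioms:

    d = {'foo': 1, 'bar': 2}
    handlers = list(d.keys())    # concrete list on 2 and 3; bare d.keys() is a view on 3
    try:
        int('not-a-version')
    except ValueError as e:      # 'except ValueError, e' is a SyntaxError on Python 3
        print('failed: %s' % e)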


@@ -1,5 +1,5 @@
 import re
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import portage
@@ -34,7 +34,7 @@ def scan_url(pkg, url, options):
     project, filename = re.search(berlios_regex, url).groups()
     project_page = "http://developer.berlios.de/projects/%s" % project
-    content = urllib.urlopen(project_page).read()
+    content = urllib.request.urlopen(project_page).read()
     project_id = re.search(
         r"/project/filelist.php\?group_id=(\d+)",


@@ -1,6 +1,6 @@
 import re
 import portage
-import urllib2
+import urllib.request, urllib.error, urllib.parse
 import json
 from euscan import helpers, output, mangling
@@ -52,7 +52,7 @@ def mangle_version(up_pv):
     if len(splitted) == 2:  # Split second part is sub-groups
         part = splitted.pop()
-        for i in xrange(0, len(part), 3):
+        for i in range(0, len(part), 3):
             splitted.append(part[i:i + 3])
     if len(splitted) == 2:  # add last group if it's missing
@@ -117,7 +117,7 @@ def scan_pkg(pkg, options):
     try:
         fp = helpers.urlopen(url)
-    except urllib2.URLError:
+    except urllib.error.URLError:
         return []
     except IOError:
         return []
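
Note: xrange() is gone on Python 3, and range() there is lazy anyway, so the loop body is unchanged; urllib2.URLError likewise moved to urllib.error.URLError. The sub-grouping loop this hunk touches, with illustrative data:

    part = '123456'
    splitted = []
    for i in range(0, len(part), 3):   # xrange() on Python 2; range() is lazy on 3 anyway
        splitted.append(part[i:i + 3])
    print(splitted)                    # ['123', '456']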


@@ -1,4 +1,4 @@
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import re
 import bz2
 import zlib
@@ -23,7 +23,7 @@ def scan_pkg(pkg, options):
     output.einfo("Using Debian Packages: " + packages_url)
-    fp = urllib.urlopen(packages_url)
+    fp = urllib.request.urlopen(packages_url)
     content = fp.read()
     # Support for .gz and .bz2 Packages file
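
Note: fp.read() yields bytes on Python 3, which is exactly what the bz2/zlib decompression in this handler expects; only a later str-based regex over the result would need a decode. A self-contained sketch with stand-in data rather than a real Packages.bz2:

    import bz2

    raw = bz2.compress(b'Package: foo\nVersion: 1.0-1\n')  # stand-in for a fetched file
    text = bz2.decompress(raw).decode('utf-8')             # decode before str-based regexes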


@@ -1,4 +1,4 @@
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import re
 import portage
@@ -21,7 +21,7 @@ def scan_pkg(pkg, options):
     output.einfo("Using FreeCode handler: " + package)
-    fp = urllib.urlopen("http://freecode.com/projects/%s/releases" % package)
+    fp = urllib.request.urlopen("http://freecode.com/projects/%s/releases" % package)
     content = fp.read()
     result = re.findall(
@@ -34,11 +34,11 @@ def scan_pkg(pkg, options):
         pv = mangling.mangle_version(up_pv, options)
         if helpers.version_filtered(cp, ver, pv):
             continue
-        fp = urllib.urlopen("http://freecode.com/projects/%s/releases/%s" %
+        fp = urllib.request.urlopen("http://freecode.com/projects/%s/releases/%s" %
                             (package, release_id))
         content = fp.read()
         download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]
-        fp = urllib.urlopen("http://freecode.com%s" % download_page)
+        fp = urllib.request.urlopen("http://freecode.com%s" % download_page)
         content = fp.read()
         url = re.findall(
             r'In case it doesn\'t, click here: <a href="([^"]+)"',


@@ -1,7 +1,7 @@
-from urlparse import urljoin, urlparse
-import urllib2
+from urllib.parse import urljoin, urlparse
+import urllib.request, urllib.error, urllib.parse
 import re
-import StringIO
+import io
 import difflib
 try:
@@ -75,7 +75,7 @@ def scan_html(data, url, pattern):
 def scan_ftp(data, url, pattern):
-    buf = StringIO.StringIO(data)
+    buf = io.StringIO(data)
     results = []
     for line in buf.readlines():
@@ -102,7 +102,7 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
     try:
         fp = helpers.urlopen(url)
-    except urllib2.URLError:
+    except urllib.error.URLError:
         return []
     except IOError:
         return []
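
Note: the StringIO module became io.StringIO, which accepts only str; if scan_ftp were ever handed raw bytes from urlopen, io.BytesIO or a decode would be needed instead. The readline-style iteration it relies on is unchanged:

    import io

    buf = io.StringIO(u'-rw-r--r-- 1 ftp ftp 1024 Jan 01 2019 foo-1.0.tar.gz\n')
    for line in buf.readlines():      # same readline interface as the old StringIO module
        print(line.split()[-1])       # foo-1.0.tar.gz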


@@ -1,5 +1,5 @@
 import json
-import urllib2
+import urllib.request, urllib.error, urllib.parse
 import re
 import portage
@@ -41,7 +41,7 @@ def scan_url(pkg, url, options):
     output.einfo("Using github API for: project=%s user=%s filename=%s" % \
         (project, user, filename))
-    dlreq = urllib2.urlopen('https://api.github.com/repos/%s/%s/downloads' % \
+    dlreq = urllib.request.urlopen('https://api.github.com/repos/%s/%s/downloads' % \
         (user, project))
     dls = json.load(dlreq)


@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 import re
-import urllib2
+import urllib.request, urllib.error, urllib.parse
 try:
     import simplejson as json
@@ -47,7 +47,7 @@ def scan_pkg(pkg, options):
     output.einfo("Using Gnome json cache: " + package)
-    fp = urllib2.urlopen('/'.join([GNOME_URL_SOURCE, package, 'cache.json']))
+    fp = urllib.request.urlopen('/'.join([GNOME_URL_SOURCE, package, 'cache.json']))
     content = fp.read()
     fp.close()


@@ -1,6 +1,6 @@
 import re
 import portage
-import urllib2
+import urllib.request, urllib.error, urllib.parse
 import xml.dom.minidom
 from euscan import helpers, output, mangling
@@ -40,7 +40,7 @@ def scan_pkg(pkg, options):
     try:
         fp = helpers.urlopen(url)
-    except urllib2.URLError:
+    except urllib.error.URLError:
         return []
     except IOError:
         return []


@@ -1,4 +1,4 @@
-import xmlrpclib
+import xmlrpc.client
 import re
 import portage
@@ -36,7 +36,7 @@ def scan_pkg(pkg, options):
     output.einfo("Using PyPi XMLRPC: " + package)
-    client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
+    client = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
     versions = client.package_releases(package)
     if not versions:
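
Note: xmlrpclib moved to xmlrpc.client with an unchanged API, so only the module prefix changes. Shape of the call (the pypi.python.org XML-RPC endpoint is the one in the diff; it has since been deprecated upstream):

    import xmlrpc.client

    client = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
    # versions = client.package_releases('mypackage')   # network call; shown for shape only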


@@ -1,7 +1,7 @@
 import re
 import portage
 import json
-import urllib2
+import urllib.request, urllib.error, urllib.parse
 from euscan import helpers, output, mangling
@@ -50,7 +50,7 @@ def scan_pkg(pkg, options):
     try:
         fp = helpers.urlopen(url)
-    except urllib2.URLError:
+    except urllib.error.URLError:
         return []
     except IOError:
         return []


@@ -1,7 +1,7 @@
 import re
-import urllib2
-import generic
+import urllib.request, urllib.error, urllib.parse
+from euscan.handlers import generic
 from euscan import output, helpers
 PRIORITY = 100
@@ -37,7 +37,7 @@ def handle_directory_patterns(base, file_pattern):
     try:
         fp = helpers.urlopen(basedir)
-    except urllib2.URLError:
+    except urllib.error.URLError:
         return []
     except IOError:
         return []


@@ -1,7 +1,7 @@
 import os
 import re
 import errno
-import urllib2
+import urllib.request, urllib.error, urllib.parse
 from xml.dom.minidom import Document
 import portage
@@ -11,8 +11,8 @@ try:
 try:
     from urllib import robotparser
     from urllib import urlparse
 except ImportError:
-    import robotparser
-    import urlparse
+    import urllib.robotparser
+    import urllib.parse
 import euscan
 from euscan import CONFIG, BLACKLIST_VERSIONS, ROBOTS_TXT_BLACKLIST_DOMAINS
@@ -173,7 +173,7 @@ def url_from_template(url, version):
 # Used for brute force to increment the version
 def split_version(version):
     component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)
-    components = filter(lambda x: x and x != '.', component_re.split(version))
+    components = [x for x in component_re.split(version) if x and x != '.']
     for i in range(len(components)):
         try:
             components[i] = int(components[i])
@@ -236,7 +236,7 @@ def timeout_for_url(url):
     return timeout
-class HeadRequest(urllib2.Request):
+class HeadRequest(urllib.request.Request):
     def get_method(self):
         return "HEAD"
@@ -249,7 +249,7 @@ def urlallowed(url):
     if CONFIG['skip-robots-txt']:
         return True
-    protocol, domain = urlparse.urlparse(url)[:2]
+    protocol, domain = urllib.parse.urlparse(url)[:2]
     for bd in ROBOTS_TXT_BLACKLIST_DOMAINS:
         if re.match(bd, domain):
@@ -263,7 +263,7 @@ def urlallowed(url):
         return True
     baseurl = '%s://%s' % (protocol, domain)
-    robotsurl = urlparse.urljoin(baseurl, 'robots.txt')
+    robotsurl = urllib.parse.urljoin(baseurl, 'robots.txt')
     if baseurl in rpcache:
         rp = rpcache[baseurl]
@@ -273,7 +273,7 @@ def urlallowed(url):
     timeout = getdefaulttimeout()
     setdefaulttimeout(5)
-    rp = robotparser.RobotFileParser()
+    rp = urllib.robotparser.RobotFileParser()
     rp.set_url(robotsurl)
     try:
         rp.read()
@@ -295,7 +295,7 @@ def urlopen(url, timeout=None, verb="GET"):
         timeout = timeout_for_url(url)
     if verb == 'GET':
-        request = urllib2.Request(url)
+        request = urllib.request.Request(url)
     elif verb == 'HEAD':
         request = HeadRequest(url)
     else:
@@ -311,9 +311,9 @@ def urlopen(url, timeout=None, verb="GET"):
     if CONFIG['verbose']:
         debuglevel = CONFIG['verbose'] - 1
-        handlers.append(urllib2.HTTPHandler(debuglevel=debuglevel))
+        handlers.append(urllib.request.HTTPHandler(debuglevel=debuglevel))
-    opener = urllib2.build_opener(*handlers)
+    opener = urllib.request.build_opener(*handlers)
     return opener.open(request, None, timeout)
@@ -361,7 +361,7 @@ def tryurl(fileurl, template):
         if result:
             result = (fp.geturl(), fp.info())
-    except urllib2.URLError:
+    except urllib.error.URLError:
         result = None
     except IOError:
         result = None
@@ -462,7 +462,7 @@ def dict_to_xml(data, indent):
     def _set_value(parent, value):
         if isinstance(value, dict):
-            for k, v in value.iteritems():
+            for k, v in list(value.items()):
                 node = doc.createElement(k)
                 _set_value(node, v)
                 parent.appendChild(node)
@@ -473,10 +473,10 @@ def dict_to_xml(data, indent):
             node.appendChild(text)
             parent.appendChild(node)
         else:
-            text = doc.createTextNode(unicode(value))
+            text = doc.createTextNode(str(value))
             parent.appendChild(text)
-    for key, value in data.iteritems():
+    for key, value in list(data.items()):
         node = doc.createElement("package")
         node.setAttribute("name", key)
         _set_value(node, value)
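
Note on the helpers hunks: three recurring conversions land here: dict.iteritems() becomes items() (the list() wrapper is only strictly needed when the dict is mutated mid-loop), unicode() becomes str(), and filter() returning a lazy iterator on Python 3 is why split_version switches to a list comprehension, since components is indexed and assigned to afterwards. A quick behavior check of that last change:

    import re

    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)
    components = [x for x in component_re.split('1.2b') if x and x != '.']
    print(components)   # ['1', '2', 'b'] -- a real list, so components[i] = int(...) works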


@@ -143,7 +143,7 @@ def gentoo_mangle_version(up_pv):
         rev = rev_match.group(3)
         additional_version = '_p' + rev
-    for this_suf in suf_matches.keys():
+    for this_suf in list(suf_matches.keys()):
         if rs_match:
             break
         for regex in suf_matches[this_suf]:


@@ -230,16 +230,16 @@ class EuscanOutput(object):
                 )
             else:
                 if not self.config['quiet']:
-                    print "Upstream Version:", pp.number("%s" % version),
-                    print pp.path(" %s" % urls)
+                    print("Upstream Version:", pp.number("%s" % version), end=' ')
+                    print(pp.path(" %s" % urls))
                 else:
-                    print pp.cpv("%s-%s" % (cp, version)) + ":", pp.path(urls)
+                    print(pp.cpv("%s-%s" % (cp, version)) + ":", pp.path(urls))
     def metadata(self, key, value, show=True):
         if self.config["format"]:
             self.queries[self.current_query]["metadata"][key] = value
         elif show:
-            print "%s: %s" % (key.capitalize(), value)
+            print("%s: %s" % (key.capitalize(), value))
     def __getattr__(self, key):
         if not self.config["quiet"] and self.current_query is not None:
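
Note: the trailing comma of the Python 2 print statement (suppress the newline) maps to end=' ' with the print function; with from __future__ import print_function the new form runs on Python 2 as well. Equivalence sketch:

    from __future__ import print_function   # makes the 3.x form valid on Python 2

    print("Upstream Version:", "1.2.3", end=' ')   # old py2 form: trailing comma on print
    print("http://example.com/foo-1.2.3.tar.gz")   # lands on the same line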


@@ -1,4 +1,4 @@
+from __future__ import print_function
 import os
 import sys


@@ -1,6 +1,6 @@
 #!/usr/bin/env python
+from __future__ import print_function
 import re
 import sys
@@ -46,7 +46,7 @@ class set_version(Command):
     def run(self):
         ver = 'git' if __version__ == '9999' else __version__
-        print("Settings version to %s" % ver)
+        print(("Settings version to %s" % ver))
 def sub(files, pattern):
     for f in files:
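
Note: the doubled parentheses in print(("Settings version to %s" % ver)) are a typical 2to3 artifact, where the tool wraps the old statement's already-parenthesized argument. Harmless on Python 3 (a parenthesized expression, not a tuple) and collapsible in a follow-up:

    ver = 'git'
    print(("Settings version to %s" % ver))   # identical output to the single-paren form
    print("Settings version to %s" % ver)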