Initial python3 compatibility

This commit is contained in:
Bernard Cafarelli 2019-12-05 17:46:19 +01:00
parent 2fedc7450d
commit 94c3eeba49
No known key found for this signature in database
GPG Key ID: 5A761FC3AEC20F13
20 changed files with 62 additions and 61 deletions

1
.gitignore vendored
View File

@@ -1,3 +1,4 @@
.*.swp
*~
*.py[co]
*.egg-info

View File

@@ -4,7 +4,7 @@
Distributed under the terms of the GNU General Public License v2
"""
from __future__ import print_function
import os
# Meta
@@ -21,7 +21,7 @@ __description__ = "A tool to detect new upstream releases."
import sys
import getopt
from errno import EINTR, EINVAL
from httplib import HTTPConnection
from http.client import HTTPConnection
from portage import settings
from portage.output import white, yellow, turquoise, green
@@ -270,7 +270,7 @@ def main():
on_progress = None
if CONFIG['progress']:
on_progress_gen = progress_bar()
on_progress = on_progress_gen.next()
on_progress = next(on_progress_gen)
on_progress(maxval=len(queries) * 100, increment=0, label="Working...")
# Importing stuff here for performance reasons
@@ -322,7 +322,7 @@ def main():
print("")
if CONFIG['progress']:
on_progress_gen.next()
next(on_progress_gen)
print("\n", file=sys.stderr)
output.set_query(None)

View File

@@ -5,7 +5,7 @@
__version__ = "git"
import ConfigParser
import configparser
import os
from ast import literal_eval
@@ -33,7 +33,7 @@ CONFIG = {
'handlers-exclude': [],
}
config = ConfigParser.ConfigParser()
config = configparser.ConfigParser()
config.read(['/etc/euscan.conf', os.path.expanduser('~/.euscan.conf')])
if config.has_section("euscan"):
for key, value in config.items("euscan"):
@@ -88,5 +88,5 @@ ROBOTS_TXT_BLACKLIST_DOMAINS = [
'(.*)festvox\.org(.*)',
]
from out import EuscanOutput
from euscan.out import EuscanOutput
output = EuscanOutput(CONFIG)

View File

@@ -68,7 +68,7 @@ def get_metadata(pkg):
output.einfo('Using custom metadata: %s' % meta_override)
if not pkg_metadata:
pkg_metadata = pkg.metadata
except Exception, e:
except Exception as e:
output.ewarn('Error when fetching metadata: %s' % str(e))
if not pkg_metadata:
@@ -183,7 +183,7 @@ def scan(pkg, urls, on_progress=None):
metadata = get_metadata(pkg)
versions = []
pkg_handlers = find_handlers('package', metadata.keys())
pkg_handlers = find_handlers('package', list(metadata.keys()))
if not pkg_handlers:
pkg_handler = find_best_handler('package', pkg)
if pkg_handler:

View File

@@ -1,5 +1,5 @@
import re
import urllib
import urllib.request, urllib.parse, urllib.error
import portage
@@ -34,7 +34,7 @@ def scan_url(pkg, url, options):
project, filename = re.search(berlios_regex, url).groups()
project_page = "http://developer.berlios.de/projects/%s" % project
content = urllib.urlopen(project_page).read()
content = urllib.request.urlopen(project_page).read()
project_id = re.search(
r"/project/filelist.php\?group_id=(\d+)",

View File

@@ -1,6 +1,6 @@
import re
import portage
import urllib2
import urllib.request, urllib.error, urllib.parse
import json
from euscan import helpers, output, mangling
@@ -52,7 +52,7 @@ def mangle_version(up_pv):
if len(splitted) == 2: # Split second part is sub-groups
part = splitted.pop()
for i in xrange(0, len(part), 3):
for i in range(0, len(part), 3):
splitted.append(part[i:i + 3])
if len(splitted) == 2: # add last group if it's missing
@@ -117,7 +117,7 @@ def scan_pkg(pkg, options):
try:
fp = helpers.urlopen(url)
except urllib2.URLError:
except urllib.error.URLError:
return []
except IOError:
return []

View File

@@ -1,4 +1,4 @@
import urllib
import urllib.request, urllib.parse, urllib.error
import re
import bz2
import zlib
@@ -23,7 +23,7 @@ def scan_pkg(pkg, options):
output.einfo("Using Debian Packages: " + packages_url)
fp = urllib.urlopen(packages_url)
fp = urllib.request.urlopen(packages_url)
content = fp.read()
# Support for .gz and .bz2 Packages file

View File

@@ -1,4 +1,4 @@
import urllib
import urllib.request, urllib.parse, urllib.error
import re
import portage
@@ -21,7 +21,7 @@ def scan_pkg(pkg, options):
output.einfo("Using FreeCode handler: " + package)
fp = urllib.urlopen("http://freecode.com/projects/%s/releases" % package)
fp = urllib.request.urlopen("http://freecode.com/projects/%s/releases" % package)
content = fp.read()
result = re.findall(
@@ -34,11 +34,11 @@ def scan_pkg(pkg, options):
pv = mangling.mangle_version(up_pv, options)
if helpers.version_filtered(cp, ver, pv):
continue
fp = urllib.urlopen("http://freecode.com/projects/%s/releases/%s" %
fp = urllib.request.urlopen("http://freecode.com/projects/%s/releases/%s" %
(package, release_id))
content = fp.read()
download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]
fp = urllib.urlopen("http://freecode.com%s" % download_page)
fp = urllib.request.urlopen("http://freecode.com%s" % download_page)
content = fp.read()
url = re.findall(
r'In case it doesn\'t, click here: <a href="([^"]+)"',

View File

@@ -1,7 +1,7 @@
from urlparse import urljoin, urlparse
import urllib2
from urllib.parse import urljoin, urlparse
import urllib.request, urllib.error, urllib.parse
import re
import StringIO
import io
import difflib
try:
@@ -75,7 +75,7 @@ def scan_html(data, url, pattern):
def scan_ftp(data, url, pattern):
buf = StringIO.StringIO(data)
buf = io.StringIO(data)
results = []
for line in buf.readlines():
@@ -102,7 +102,7 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
try:
fp = helpers.urlopen(url)
except urllib2.URLError:
except urllib.error.URLError:
return []
except IOError:
return []

View File

@@ -1,5 +1,5 @@
import json
import urllib2
import urllib.request, urllib.error, urllib.parse
import re
import portage
@@ -41,7 +41,7 @@ def scan_url(pkg, url, options):
output.einfo("Using github API for: project=%s user=%s filename=%s" % \
(project, user, filename))
dlreq = urllib2.urlopen('https://api.github.com/repos/%s/%s/downloads' % \
dlreq = urllib.request.urlopen('https://api.github.com/repos/%s/%s/downloads' % \
(user, project))
dls = json.load(dlreq)

View File

@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
import re
import urllib2
import urllib.request, urllib.error, urllib.parse
try:
import simplejson as json
@@ -47,7 +47,7 @@ def scan_pkg(pkg, options):
output.einfo("Using Gnome json cache: " + package)
fp = urllib2.urlopen('/'.join([GNOME_URL_SOURCE, package, 'cache.json']))
fp = urllib.request.urlopen('/'.join([GNOME_URL_SOURCE, package, 'cache.json']))
content = fp.read()
fp.close()

View File

@@ -1,6 +1,6 @@
import re
import portage
import urllib2
import urllib.request, urllib.error, urllib.parse
import xml.dom.minidom
from euscan import helpers, output, mangling
@@ -40,7 +40,7 @@ def scan_pkg(pkg, options):
try:
fp = helpers.urlopen(url)
except urllib2.URLError:
except urllib.error.URLError:
return []
except IOError:
return []

View File

@@ -1,4 +1,4 @@
import xmlrpclib
import xmlrpc.client
import re
import portage
@@ -36,7 +36,7 @@ def scan_pkg(pkg, options):
output.einfo("Using PyPi XMLRPC: " + package)
client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
client = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
versions = client.package_releases(package)
if not versions:

View File

@@ -1,7 +1,7 @@
import re
import portage
import json
import urllib2
import urllib.request, urllib.error, urllib.parse
from euscan import helpers, output, mangling
@@ -50,7 +50,7 @@ def scan_pkg(pkg, options):
try:
fp = helpers.urlopen(url)
except urllib2.URLError:
except urllib.error.URLError:
return []
except IOError:
return []

View File

@@ -1,7 +1,7 @@
import re
import urllib2
import urllib.request, urllib.error, urllib.parse
import generic
from euscan.handlers import generic
from euscan import output, helpers
PRIORITY = 100
@@ -37,7 +37,7 @@ def handle_directory_patterns(base, file_pattern):
try:
fp = helpers.urlopen(basedir)
except urllib2.URLError:
except urllib.error.URLError:
return []
except IOError:
return []

View File

@@ -1,7 +1,7 @@
import os
import re
import errno
import urllib2
import urllib.request, urllib.error, urllib.parse
from xml.dom.minidom import Document
import portage
@@ -11,8 +11,8 @@ try:
from urllib import robotparser
from urllib import urlparse
except ImportError:
import robotparser
import urlparse
import urllib.robotparser
import urllib.parse
import euscan
from euscan import CONFIG, BLACKLIST_VERSIONS, ROBOTS_TXT_BLACKLIST_DOMAINS
@@ -173,7 +173,7 @@ def url_from_template(url, version):
# Used for brute force to increment the version
def split_version(version):
component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)
components = filter(lambda x: x and x != '.', component_re.split(version))
components = [x for x in component_re.split(version) if x and x != '.']
for i in range(len(components)):
try:
components[i] = int(components[i])
@@ -236,7 +236,7 @@ def timeout_for_url(url):
return timeout
class HeadRequest(urllib2.Request):
class HeadRequest(urllib.request.Request):
def get_method(self):
return "HEAD"
@@ -249,7 +249,7 @@ def urlallowed(url):
if CONFIG['skip-robots-txt']:
return True
protocol, domain = urlparse.urlparse(url)[:2]
protocol, domain = urllib.parse.urlparse(url)[:2]
for bd in ROBOTS_TXT_BLACKLIST_DOMAINS:
if re.match(bd, domain):
@@ -263,7 +263,7 @@ def urlallowed(url):
return True
baseurl = '%s://%s' % (protocol, domain)
robotsurl = urlparse.urljoin(baseurl, 'robots.txt')
robotsurl = urllib.parse.urljoin(baseurl, 'robots.txt')
if baseurl in rpcache:
rp = rpcache[baseurl]
@@ -273,7 +273,7 @@ def urlallowed(url):
timeout = getdefaulttimeout()
setdefaulttimeout(5)
rp = robotparser.RobotFileParser()
rp = urllib.robotparser.RobotFileParser()
rp.set_url(robotsurl)
try:
rp.read()
@@ -295,7 +295,7 @@ def urlopen(url, timeout=None, verb="GET"):
timeout = timeout_for_url(url)
if verb == 'GET':
request = urllib2.Request(url)
request = urllib.request.Request(url)
elif verb == 'HEAD':
request = HeadRequest(url)
else:
@@ -311,9 +311,9 @@ def urlopen(url, timeout=None, verb="GET"):
if CONFIG['verbose']:
debuglevel = CONFIG['verbose'] - 1
handlers.append(urllib2.HTTPHandler(debuglevel=debuglevel))
handlers.append(urllib.request.HTTPHandler(debuglevel=debuglevel))
opener = urllib2.build_opener(*handlers)
opener = urllib.request.build_opener(*handlers)
return opener.open(request, None, timeout)
@@ -361,7 +361,7 @@ def tryurl(fileurl, template):
if result:
result = (fp.geturl(), fp.info())
except urllib2.URLError:
except urllib.error.URLError:
result = None
except IOError:
result = None
@@ -462,7 +462,7 @@ def dict_to_xml(data, indent):
def _set_value(parent, value):
if isinstance(value, dict):
for k, v in value.iteritems():
for k, v in list(value.items()):
node = doc.createElement(k)
_set_value(node, v)
parent.appendChild(node)
@@ -473,10 +473,10 @@ def dict_to_xml(data, indent):
node.appendChild(text)
parent.appendChild(node)
else:
text = doc.createTextNode(unicode(value))
text = doc.createTextNode(str(value))
parent.appendChild(text)
for key, value in data.iteritems():
for key, value in list(data.items()):
node = doc.createElement("package")
node.setAttribute("name", key)
_set_value(node, value)

View File

@@ -143,7 +143,7 @@ def gentoo_mangle_version(up_pv):
rev = rev_match.group(3)
additional_version = '_p' + rev
for this_suf in suf_matches.keys():
for this_suf in list(suf_matches.keys()):
if rs_match:
break
for regex in suf_matches[this_suf]:

View File

@@ -230,16 +230,16 @@ class EuscanOutput(object):
)
else:
if not self.config['quiet']:
print "Upstream Version:", pp.number("%s" % version),
print pp.path(" %s" % urls)
print("Upstream Version:", pp.number("%s" % version), end=' ')
print(pp.path(" %s" % urls))
else:
print pp.cpv("%s-%s" % (cp, version)) + ":", pp.path(urls)
print(pp.cpv("%s-%s" % (cp, version)) + ":", pp.path(urls))
def metadata(self, key, value, show=True):
if self.config["format"]:
self.queries[self.current_query]["metadata"][key] = value
elif show:
print "%s: %s" % (key.capitalize(), value)
print("%s: %s" % (key.capitalize(), value))
def __getattr__(self, key):
if not self.config["quiet"] and self.current_query is not None:

View File

@@ -1,4 +1,4 @@
from __future__ import print_function
import os
import sys

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
from __future__ import print_function
import re
import sys
@@ -46,7 +46,7 @@ class set_version(Command):
def run(self):
ver = 'git' if __version__ == '9999' else __version__
print("Settings version to %s" % ver)
print(("Settings version to %s" % ver))
def sub(files, pattern):
for f in files: