Initial python3 compatibility

Bernard Cafarelli
2019-12-05 17:46:19 +01:00
parent 2fedc7450d
commit 94c3eeba49
20 changed files with 62 additions and 61 deletions

View File

@@ -68,7 +68,7 @@ def get_metadata(pkg):
output.einfo('Using custom metadata: %s' % meta_override)
if not pkg_metadata:
pkg_metadata = pkg.metadata
-except Exception, e:
+except Exception as e:
output.ewarn('Error when fetching metadata: %s' % str(e))
if not pkg_metadata:
@@ -183,7 +183,7 @@ def scan(pkg, urls, on_progress=None):
metadata = get_metadata(pkg)
versions = []
-pkg_handlers = find_handlers('package', metadata.keys())
+pkg_handlers = find_handlers('package', list(metadata.keys()))
if not pkg_handlers:
pkg_handler = find_best_handler('package', pkg)
if pkg_handler:
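
The two hunks above fix distinct Python 3 incompatibilities: the old "except Exception, e" binding is a syntax error in Python 3, and dict.keys() now returns a view object rather than a list. A minimal standalone sketch of both patterns (illustrative names only, not taken from the euscan sources):

    # Python 3 binds the caught exception with "as"; the comma form no longer parses.
    try:
        raise ValueError("fetch failed")
    except Exception as e:
        print('Error when fetching metadata: %s' % str(e))

    # dict.keys() is a lazy view in Python 3; wrap it in list() where a real
    # list is needed (indexing, or APIs that expect a reusable sequence).
    metadata = {'upstream': {}, 'watch': {}}
    handlers = list(metadata.keys())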

View File

@@ -1,5 +1,5 @@
import re
-import urllib
+import urllib.request, urllib.parse, urllib.error
import portage
@@ -34,7 +34,7 @@ def scan_url(pkg, url, options):
project, filename = re.search(berlios_regex, url).groups()
project_page = "http://developer.berlios.de/projects/%s" % project
-content = urllib.urlopen(project_page).read()
+content = urllib.request.urlopen(project_page).read()
project_id = re.search(
r"/project/filelist.php\?group_id=(\d+)",
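
Python 3 folds urllib, urllib2 and urlparse into the urllib package, which is why urlopen is now reached as urllib.request.urlopen. A rough standalone equivalent of the fetch above (project name and error handling are illustrative additions):

    import urllib.request
    import urllib.error

    project_page = "http://developer.berlios.de/projects/example-project"
    try:
        # read() returns bytes under Python 3, not str as it did under Python 2
        content = urllib.request.urlopen(project_page).read()
    except urllib.error.URLError:
        content = b""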

View File

@@ -1,6 +1,6 @@
import re
import portage
-import urllib2
+import urllib.request, urllib.error, urllib.parse
import json
from euscan import helpers, output, mangling
@@ -52,7 +52,7 @@ def mangle_version(up_pv):
if len(splitted) == 2: # Split second part is sub-groups
part = splitted.pop()
-for i in xrange(0, len(part), 3):
+for i in range(0, len(part), 3):
splitted.append(part[i:i + 3])
if len(splitted) == 2: # add last group if it's missing
@@ -117,7 +117,7 @@ def scan_pkg(pkg, options):
try:
fp = helpers.urlopen(url)
-except urllib2.URLError:
+except urllib.error.URLError:
return []
except IOError:
return []
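
Two further renames show up in this handler: xrange() no longer exists (range() is already lazy in Python 3), and urllib2.URLError has moved to urllib.error.URLError. A standalone sketch of both, with a placeholder URL:

    import urllib.error
    import urllib.request

    # range() replaces xrange(); it no longer builds a full list up front.
    part = "20120301"
    splitted = []
    for i in range(0, len(part), 3):
        splitted.append(part[i:i + 3])      # -> ['201', '203', '01']

    # urllib2.URLError is now urllib.error.URLError (a subclass of OSError in Python 3).
    try:
        fp = urllib.request.urlopen("http://nonexistent.invalid/modules.json")
    except urllib.error.URLError:
        fp = None
    except IOError:
        fp = None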

View File

@@ -1,4 +1,4 @@
-import urllib
+import urllib.request, urllib.parse, urllib.error
import re
import bz2
import zlib
@@ -23,7 +23,7 @@ def scan_pkg(pkg, options):
output.einfo("Using Debian Packages: " + packages_url)
-fp = urllib.urlopen(packages_url)
+fp = urllib.request.urlopen(packages_url)
content = fp.read()
# Support for .gz and .bz2 Packages file

View File

@@ -1,4 +1,4 @@
-import urllib
+import urllib.request, urllib.parse, urllib.error
import re
import portage
@@ -21,7 +21,7 @@ def scan_pkg(pkg, options):
output.einfo("Using FreeCode handler: " + package)
-fp = urllib.urlopen("http://freecode.com/projects/%s/releases" % package)
+fp = urllib.request.urlopen("http://freecode.com/projects/%s/releases" % package)
content = fp.read()
result = re.findall(
@@ -34,11 +34,11 @@ def scan_pkg(pkg, options):
pv = mangling.mangle_version(up_pv, options)
if helpers.version_filtered(cp, ver, pv):
continue
-fp = urllib.urlopen("http://freecode.com/projects/%s/releases/%s" %
+fp = urllib.request.urlopen("http://freecode.com/projects/%s/releases/%s" %
(package, release_id))
content = fp.read()
download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]
-fp = urllib.urlopen("http://freecode.com%s" % download_page)
+fp = urllib.request.urlopen("http://freecode.com%s" % download_page)
content = fp.read()
url = re.findall(
r'In case it doesn\'t, click here: <a href="([^"]+)"',
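
One thing this mechanical port does not address: under Python 3, urlopen(...).read() gives bytes, while the regular expressions above use str patterns, so re.findall() would raise a TypeError until the response body is decoded. A hedged sketch of the extra step that would be needed (encoding assumed to be UTF-8 purely for illustration):

    import re
    import urllib.request

    fp = urllib.request.urlopen("http://freecode.com/projects/example/releases")
    content = fp.read().decode("utf-8", errors="replace")   # bytes -> str before str regexes
    download_pages = re.findall(r'<a href="(/urls/[^"]+)"', content)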

View File

@@ -1,7 +1,7 @@
-from urlparse import urljoin, urlparse
-import urllib2
+from urllib.parse import urljoin, urlparse
+import urllib.request, urllib.error, urllib.parse
import re
-import StringIO
+import io
import difflib
try:
@@ -75,7 +75,7 @@ def scan_html(data, url, pattern):
def scan_ftp(data, url, pattern):
-buf = StringIO.StringIO(data)
+buf = io.StringIO(data)
results = []
for line in buf.readlines():
@@ -102,7 +102,7 @@ def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
try:
fp = helpers.urlopen(url)
-except urllib2.URLError:
+except urllib.error.URLError:
return []
except IOError:
return []
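
The generic handler also picks up the urlparse and StringIO renames: the URL helpers now live in urllib.parse, and StringIO.StringIO becomes io.StringIO for text (io.BytesIO being the counterpart when the data is raw bytes, as a urlopen().read() result is). A small standalone sketch with made-up listing data:

    import io
    from urllib.parse import urljoin, urlparse

    base = "ftp://ftp.example.org/pub/"
    print(urlparse(base).scheme)                # 'ftp'
    print(urljoin(base, "package-1.1.tar.gz"))  # 'ftp://ftp.example.org/pub/package-1.1.tar.gz'

    # io.StringIO wraps text; use io.BytesIO instead if the listing is still bytes.
    data = "package-1.0.tar.gz\npackage-1.1.tar.gz\n"
    buf = io.StringIO(data)
    for line in buf.readlines():
        print(line.strip())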

View File

@@ -1,5 +1,5 @@
import json
-import urllib2
+import urllib.request, urllib.error, urllib.parse
import re
import portage
@@ -41,7 +41,7 @@ def scan_url(pkg, url, options):
output.einfo("Using github API for: project=%s user=%s filename=%s" % \
(project, user, filename))
-dlreq = urllib2.urlopen('https://api.github.com/repos/%s/%s/downloads' % \
+dlreq = urllib.request.urlopen('https://api.github.com/repos/%s/%s/downloads' % \
(user, project))
dls = json.load(dlreq)
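
json.load() keeps working on the response object here, but only because json accepts bytes input from Python 3.6 onward; older Python 3 interpreters need the body decoded first. A sketch of the more defensive spelling (user and project names are placeholders, and GitHub's old /downloads endpoint is kept only because it is what the handler queries):

    import json
    import urllib.request

    user, project = "someuser", "someproject"
    dlreq = urllib.request.urlopen('https://api.github.com/repos/%s/%s/downloads' %
                                   (user, project))
    dls = json.loads(dlreq.read().decode('utf-8'))   # explicit decode works on every Python 3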

View File

@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
import re
-import urllib2
+import urllib.request, urllib.error, urllib.parse
try:
import simplejson as json
@@ -47,7 +47,7 @@ def scan_pkg(pkg, options):
output.einfo("Using Gnome json cache: " + package)
-fp = urllib2.urlopen('/'.join([GNOME_URL_SOURCE, package, 'cache.json']))
+fp = urllib.request.urlopen('/'.join([GNOME_URL_SOURCE, package, 'cache.json']))
content = fp.read()
fp.close()

View File

@@ -1,6 +1,6 @@
import re
import portage
-import urllib2
+import urllib.request, urllib.error, urllib.parse
import xml.dom.minidom
from euscan import helpers, output, mangling
@@ -40,7 +40,7 @@ def scan_pkg(pkg, options):
try:
fp = helpers.urlopen(url)
-except urllib2.URLError:
+except urllib.error.URLError:
return []
except IOError:
return []

View File

@@ -1,4 +1,4 @@
-import xmlrpclib
+import xmlrpc.client
import re
import portage
@@ -36,7 +36,7 @@ def scan_pkg(pkg, options):
output.einfo("Using PyPi XMLRPC: " + package)
-client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
+client = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
versions = client.package_releases(package)
if not versions:
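
xmlrpclib is another straight rename, to xmlrpc.client. The ported call in isolation (package name is a placeholder; package_releases() is the XML-RPC method the handler already uses):

    import xmlrpc.client

    client = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
    versions = client.package_releases('some-package')   # placeholder package name
    if not versions:
        versions = []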

View File

@@ -1,7 +1,7 @@
import re
import portage
import json
-import urllib2
+import urllib.request, urllib.error, urllib.parse
from euscan import helpers, output, mangling
@@ -50,7 +50,7 @@ def scan_pkg(pkg, options):
try:
fp = helpers.urlopen(url)
-except urllib2.URLError:
+except urllib.error.URLError:
return []
except IOError:
return []

View File

@@ -1,7 +1,7 @@
import re
-import urllib2
+import urllib.request, urllib.error, urllib.parse
-import generic
+from euscan.handlers import generic
from euscan import output, helpers
PRIORITY = 100
@@ -37,7 +37,7 @@ def handle_directory_patterns(base, file_pattern):
try:
fp = helpers.urlopen(basedir)
-except urllib2.URLError:
+except urllib.error.URLError:
return []
except IOError:
return []
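
The "import generic" change in this file is a different Python 3 issue: implicit relative imports are gone, so a sibling module inside the euscan.handlers package has to be imported through the package (as the commit does) or with an explicit relative import. Both spellings, assuming this file lives in euscan/handlers/:

    # Absolute import, as used in the commit:
    from euscan.handlers import generic

    # Explicit relative import would also work from inside the package:
    # from . import generic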