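"""euscan generic handler.

Discovers new upstream versions of a package by scanning remote directory
listings (HTML or FTP) derived from its download URL and, as a fallback,
by brute forcing nearby version numbers directly in the URL.
"""
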
from urlparse import urljoin, urlparse

import urllib2
import re
import StringIO
import difflib

try:
    from BeautifulSoup import BeautifulSoup
except ImportError:
    from bs4 import BeautifulSoup

import portage

from euscan import output, helpers, mangling, CONFIG, SCANDIR_BLACKLIST_URLS, \
    BRUTEFORCE_BLACKLIST_PACKAGES, BRUTEFORCE_BLACKLIST_URLS

HANDLER_NAME = "generic"
CONFIDENCE = 45
PRIORITY = 0

BRUTEFORCE_HANDLER_NAME = "brute_force"
BRUTEFORCE_CONFIDENCE = 30


def confidence_score(found, original, minimum=CONFIDENCE):
    """Rate how much the found URL looks like the original one.

    Returns a score between minimum and 2 * minimum.
    """
    found_p = urlparse(found)
    original_p = urlparse(original)

    # check if the base url is the same
    if found_p.netloc != original_p.netloc:
        return minimum

    # check if the directory depth is the same
    if len(found_p.path.split("/")) != len(original_p.path.split("/")):
        return minimum

    # strip numbers, dots and plus signs
    found_path = re.sub(r"[\d+\.]", "", found_p.path)
    original_path = re.sub(r"[\d+\.]", "", original_p.path)

    # strip the common leading part of both paths; bound the index by the
    # shorter path to avoid an IndexError when one is a prefix of the other
    i = 0
    max_i = min(len(found_path), len(original_path))
    while i < max_i and found_path[i] == original_path[i]:
        i += 1
    found_path = found_path[i:]
    original_path = original_path[i:]

    # calculate difference ratio
    diff = difflib.SequenceMatcher(None, found_path, original_path).ratio()
    return int(minimum + minimum * diff)  # maximum score is minimum * 2
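
# Illustration (hypothetical URLs): with CONFIDENCE = 45, two same-host,
# same-depth URLs whose paths differ only in the version digits score the
# 2 * minimum ceiling, while a different host falls back to the minimum:
#
#   confidence_score("http://example.com/foo/foo-1.1.tar.gz",
#                    "http://example.com/foo/foo-1.0.tar.gz")          # -> 90
#   confidence_score("http://mirror.example.org/foo/foo-1.1.tar.gz",
#                    "http://example.com/foo/foo-1.0.tar.gz")          # -> 45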


def scan_html(data, url, pattern):
    """Scan an HTML directory listing for links matching the version pattern."""
    soup = BeautifulSoup(data)
    results = []

    for link in soup.findAll('a'):
        href = link.get("href")
        if not href:
            continue

        if href.startswith(url):
            href = href.replace(url, "", 1)

        match = re.search(pattern, href, re.I)
        if match:
            results.append(
                (".".join([x for x in match.groups() if x is not None]),
                 match.group(0))
            )

    return results
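
# Example (hypothetical listing): with pattern r"foo-(\d+)\.(\d+)\.tar\.gz"
# and a page linking to "foo-1.2.tar.gz", scan_html() returns
# [("1.2", "foo-1.2.tar.gz")]: the version groups joined with dots, plus
# the exact text that matched.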


def scan_ftp(data, url, pattern):
    """Scan a raw FTP directory listing for entries matching the pattern."""
    buf = StringIO.StringIO(data)
    results = []

    for line in buf.readlines():
        line = line.replace("\n", "").replace("\r", "")
        match = re.search(pattern, line, re.I)
        if match:
            results.append(
                (".".join([x for x in match.groups() if x is not None]),
                 match.group(0))
            )

    return results
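
# Example (hypothetical LIST output): a line such as
#   "-rw-r--r-- 1 ftp ftp 123456 Jan 01 2012 foo-1.2.tar.gz"
# matched against r"foo-(\d+)\.(\d+)\.tar\.gz" likewise yields
# ("1.2", "foo-1.2.tar.gz").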


def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
    """Recursively scan a directory tree, one (url fragment, pattern) step
    at a time, and collect candidate upstream versions."""
    if not steps:
        return []

    url += steps[0][0]
    pattern = steps[0][1]

    steps = steps[1:]

    output.einfo("Scanning: %s" % url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    results = []

    if re.search(r"<\s*a\s+[^>]*href", data, re.I):
        results.extend(scan_html(data, url, pattern))
    elif url.startswith('ftp://'):
        results.extend(scan_ftp(data, url, pattern))

    versions = []

    for up_pv, path in results:
        pv = mangling.mangle_version(up_pv, options)

        if helpers.version_filtered(cp, ver, pv):
            continue
        if not url.endswith("/"):
            url = url + "/"
        path = urljoin(url, path)

        if not steps and path not in orig_url:
            confidence = confidence_score(path, orig_url)
            path = mangling.mangle_url(path, options)
            versions.append((path, pv, HANDLER_NAME, confidence))

        if steps:
            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url,
                                           options)
            versions.extend(ret)

    return versions
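
# Orientation note (assumed step shape): helpers.generate_scan_paths() is
# expected to yield (url fragment, pattern) pairs. At each level the fragment
# is appended to the current URL, the resulting listing is fetched, and the
# pattern is matched against its entries; every match becomes the starting
# URL of the next level, until no steps remain.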


def scan_url(pkg, url, options):
    """Scan the download URL of a package for new upstream versions."""
    # initialize here so brute force still runs when scan-dir is disabled
    ret = []

    if CONFIG["scan-dir"]:
        for bu in SCANDIR_BLACKLIST_URLS:
            if re.match(bu, url):
                output.einfo("%s is blacklisted by rule %s" % (url, bu))
                return []

        resolved_url = helpers.parse_mirror(url)
        if not resolved_url:
            return []

        cp, ver, rev = portage.pkgsplit(pkg.cpv)

        # 'Hack' for _beta/_rc versions where _ is used instead of -
        if ver not in resolved_url:
            newver = helpers.version_change_end_sep(ver)
            if newver and newver in resolved_url:
                output.einfo(
                    "Version: using %s instead of %s" % (newver, ver)
                )
                ver = newver

        template = helpers.template_from_url(resolved_url, ver)
        if '${' not in template:
            output.einfo(
                "URL doesn't seem to depend on version: %s not found in %s" %
                (ver, resolved_url)
            )
            return []
        else:
            output.einfo("Scanning: %s" % template)

        steps = helpers.generate_scan_paths(template)
        ret = scan_directory_recursive(cp, ver, rev, "", steps, url, options)

    if not ret:
        ret = brute_force(pkg, url)

    return ret
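
# Hedged usage sketch (assumes a euscan package object whose `cpv` splits
# with portage.pkgsplit, e.g. pkg.cpv == "net-misc/foo-1.0"):
#   versions = scan_url(pkg, "http://example.com/foo/foo-1.0.tar.gz", options)
# Each hit is a (url, version, handler name, confidence) tuple.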


def brute_force(pkg, url):
    """Generate candidate versions close to the current one and probe the
    download URL template for each of them."""
    if CONFIG["brute-force"] == 0:
        return []

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    url = helpers.parse_mirror(url)
    if not url:
        return []

    for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
        if re.match(bp, cp):
            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
            return []

    for bp in BRUTEFORCE_BLACKLIST_URLS:
        if re.match(bp, url):
            output.einfo("%s is blacklisted by rule %s" % (url, bp))
            return []

    output.einfo("Generating version from " + ver)

    components = helpers.split_version(ver)
    versions = helpers.gen_versions(components, CONFIG["brute-force"])

    # Remove unwanted versions; iterate over a copy, since removing items
    # from the list being iterated would skip elements
    for v in versions[:]:
        if helpers.vercmp(cp, ver, helpers.join_version(v)) >= 0:
            versions.remove(v)

    if not versions:
        output.einfo("Can't generate new versions from " + ver)
        return []

    template = helpers.template_from_url(url, ver)

    if '${PV}' not in template:
        output.einfo(
            "URL doesn't seem to depend on full version: %s not found in %s" %
            (ver, url))
        return []
    else:
        output.einfo("Brute forcing: %s" % template)

    result = []

    i = 0
    done = []

    while i < len(versions):
        components = versions[i]
        i += 1
        # compare as a tuple, since done stores tuples
        if tuple(components) in done:
            continue
        done.append(tuple(components))

        version = helpers.join_version(components)

        if helpers.version_filtered(cp, ver, version):
            continue

        try_url = helpers.url_from_template(template, version)
        infos = helpers.tryurl(try_url, template)

        if not infos:
            continue
        confidence = confidence_score(try_url, url,
                                      minimum=BRUTEFORCE_CONFIDENCE)
        result.append([try_url, version, BRUTEFORCE_HANDLER_NAME, confidence])

        if len(result) > CONFIG['brute-force-false-watermark']:
            output.einfo(
                "Broken server detected! Skipping brute force."
            )
            return []

        if CONFIG["brute-force-recursive"]:
            for v in helpers.gen_versions(list(components),
                                          CONFIG["brute-force"]):
                if v not in versions and tuple(v) not in done:
                    versions.append(v)

        if CONFIG["oneshot"]:
            break

    return result
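
# Orientation note (assumed helper behavior): for ver = "1.2.3",
# helpers.split_version() is expected to break the version into components
# and helpers.gen_versions() to produce nearby candidates (e.g. 1.2.4,
# 1.3.0, 2.0.0 at increasing brute-force levels); each candidate is
# substituted into the template and probed with helpers.tryurl().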


def can_handle(pkg, url):
    """The generic handler is a catch-all: it can always be tried."""
    return True