2011-08-31 15:38:32 +02:00
|
|
|
import urllib2
|
|
|
|
import re
|
|
|
|
import StringIO
|
|
|
|
|
2012-06-08 09:18:59 +02:00
|
|
|
try:
|
|
|
|
from BeautifulSoup import BeautifulSoup
|
|
|
|
except ImportError:
|
|
|
|
from bs4 import BeautifulSoup
|
2011-08-31 15:38:32 +02:00
|
|
|
|
|
|
|
import portage
|
|
|
|
|
2012-04-28 18:16:05 +02:00
|
|
|
from euscan import CONFIG, SCANDIR_BLACKLIST_URLS, \
|
2012-05-23 16:30:43 +02:00
|
|
|
BRUTEFORCE_BLACKLIST_PACKAGES, BRUTEFORCE_BLACKLIST_URLS, output, helpers
|
|
|
|
|
|
|
|
# Identifier and confidence score attached to versions found by the
# directory-scanning path (scan() / scan_directory_recursive()).
HANDLER_NAME = "generic"
CONFIDENCE = 50.0

# Identifier and (deliberately lower) confidence score for versions found
# by blindly probing generated URLs (brute_force()).
BRUTEFORCE_HANDLER_NAME = "brute_force"
BRUTEFORCE_CONFIDENCE = 30.0
|
2011-08-31 15:38:32 +02:00
|
|
|
|
2012-04-28 18:16:05 +02:00
|
|
|
|
2011-08-31 15:38:32 +02:00
|
|
|
def scan_html(data, url, pattern):
    """Extract candidate versions from the links of an HTML index page.

    Matches every ``<a href=...>`` target against *pattern* and returns a
    list of ``(version, matched_path)`` tuples, where ``version`` is the
    pattern's first capture group.
    """
    found = []

    for anchor in BeautifulSoup(data).findAll('a'):
        target = anchor.get("href")
        if not target:
            continue

        # Absolute links pointing back into the scanned directory are
        # turned into paths relative to *url* before matching.
        if target.startswith(url):
            target = target.replace(url, "", 1)

        hit = re.match(pattern, target, re.I)
        if hit:
            found.append((hit.group(1), hit.group(0)))

    return found
|
|
|
|
|
2012-04-28 18:16:05 +02:00
|
|
|
|
2011-08-31 15:38:32 +02:00
|
|
|
def scan_ftp(data, url, pattern):
    """Extract candidate versions from a raw FTP directory listing.

    Each line of *data* is matched against *pattern*; returns a list of
    ``(version, matched_text)`` tuples, where ``version`` is the pattern's
    first capture group.
    """
    found = []

    # A StringIO file object iterates line by line, like readlines().
    for raw_line in StringIO.StringIO(data):
        entry = raw_line.replace("\n", "").replace("\r", "")
        hit = re.search(pattern, entry, re.I)
        if hit:
            found.append((hit.group(1), hit.group(0)))

    return found
|
|
|
|
|
2012-04-28 18:16:05 +02:00
|
|
|
|
2012-03-28 09:08:22 +02:00
|
|
|
def scan_directory_recursive(cp, ver, rev, url, steps, orig_url):
    """Recursively walk a remote directory tree looking for new versions.

    ``cp``/``ver``/``rev`` are the split package atom, used to filter out
    versions older than the current one.  ``url`` is the directory being
    scanned at this level; ``steps`` is a list of ``(path_component,
    regex)`` tuples still to descend (the first one is consumed here);
    ``orig_url`` is the upstream URL from the ebuild, used to avoid
    reporting the version we already have.

    Returns a list of ``(path, pv, HANDLER_NAME, CONFIDENCE)`` tuples.
    """
    if not steps:
        return []

    url += steps[0][0]
    pattern = steps[0][1]
    steps = steps[1:]

    output.einfo("Scanning: %s" % url)

    try:
        fp = helpers.urlopen(url)
    except (urllib2.URLError, IOError):
        # Network or protocol failure: silently give up on this branch.
        return []

    if not fp:
        return []

    data = fp.read()

    results = []

    # An HTML index page contains <a href=...> links; anything else served
    # over FTP is treated as a raw directory listing.
    if re.search(r"<\s*a\s+[^>]*href", data):
        results.extend(scan_html(data, url, pattern))
    elif url.startswith('ftp://'):
        results.extend(scan_ftp(data, url, pattern))

    versions = []

    for up_pv, path in results:
        pv = helpers.gentoo_mangle_version(up_pv)
        if helpers.version_filtered(cp, ver, pv):
            continue

        # Join the matched path onto the current directory URL.
        if not url.endswith('/') and not path.startswith('/'):
            path = url + '/' + path
        else:
            path = url + path

        # Only leaf directories yield candidates; skip the URL we already
        # know about.
        if not steps and path not in orig_url:
            versions.append((path, pv, HANDLER_NAME, CONFIDENCE))

        if steps:
            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url)
            versions.extend(ret)

    return versions
|
|
|
|
|
2012-04-28 18:16:05 +02:00
|
|
|
|
2011-08-31 15:38:32 +02:00
|
|
|
def scan(cpv, url):
    """Scan upstream for new versions of *cpv* by walking the remote
    directory layout derived from the version template of *url*.

    Returns a list of ``(path, pv, HANDLER_NAME, CONFIDENCE)`` tuples,
    or ``[]`` when the URL is blacklisted, unresolvable, or does not
    depend on the version.
    """
    for rule in SCANDIR_BLACKLIST_URLS:
        if re.match(rule, url):
            output.einfo("%s is blacklisted by rule %s" % (url, rule))
            return []

    resolved_url = helpers.parse_mirror(url)
    if not resolved_url:
        return []

    cp, ver, rev = portage.pkgsplit(cpv)

    # 'Hack' for _beta/_rc versions where _ is used instead of -
    if ver not in resolved_url:
        alt_ver = helpers.version_change_end_sep(ver)
        if alt_ver and alt_ver in resolved_url:
            output.einfo(
                "Version: using %s instead of %s" % (alt_ver, ver)
            )
            ver = alt_ver

    template = helpers.template_from_url(resolved_url, ver)

    if '${' not in template:
        output.einfo(
            "Url doesn't seems to depend on version: %s not found in %s" %
            (ver, resolved_url)
        )
        return []

    output.einfo("Scanning: %s" % template)

    steps = helpers.generate_scan_paths(template)
    return scan_directory_recursive(cp, ver, rev, "", steps, url)
|
2011-08-31 15:38:32 +02:00
|
|
|
|
2012-04-28 18:16:05 +02:00
|
|
|
|
2011-08-31 15:38:32 +02:00
|
|
|
def brute_force(cpv, url):
    """Probe upstream for new versions of *cpv* by generating candidate
    version strings and trying the corresponding URLs.

    Returns a list of ``[url, version, BRUTEFORCE_HANDLER_NAME,
    BRUTEFORCE_CONFIDENCE]`` entries, or ``[]`` when the package/URL is
    blacklisted, no candidates can be generated, or the server appears to
    answer positively to everything.
    """
    cp, ver, rev = portage.pkgsplit(cpv)

    url = helpers.parse_mirror(url)
    if not url:
        return []

    for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
        if re.match(bp, cp):
            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
            return []

    for bp in BRUTEFORCE_BLACKLIST_URLS:
        if re.match(bp, url):
            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
            return []

    output.einfo("Generating version from " + ver)

    components = helpers.split_version(ver)
    versions = helpers.gen_versions(components, CONFIG["brute-force"])

    # Keep only candidates strictly newer than the current version.
    # BUGFIX: the previous code called versions.remove(v) while iterating
    # over the same list, which skips the element following each removal;
    # build a filtered list instead.
    versions = [v for v in versions
                if helpers.vercmp(cp, ver, helpers.join_version(v)) < 0]

    if not versions:
        output.einfo("Can't generate new versions from " + ver)
        return []

    template = helpers.template_from_url(url, ver)

    if '${PV}' not in template:
        output.einfo(
            "Url doesn't seems to depend on full version: %s not found in %s" %
            (ver, url))
        return []
    else:
        output.einfo("Brute forcing: %s" % template)

    result = []

    i = 0
    # Components already tried, stored as tuples (lists aren't hashable).
    # BUGFIX: the old membership test compared the *list* `components`
    # against stored tuples, which is always False, so deduplication never
    # happened; normalize to tuples (in a set, for O(1) lookups).
    done = set()

    # `versions` may grow while we iterate (brute-force-recursive below),
    # hence the index-based while loop instead of a for loop.
    while i < len(versions):
        components = versions[i]
        i += 1
        if tuple(components) in done:
            continue
        done.add(tuple(components))

        version = helpers.join_version(components)

        if helpers.version_filtered(cp, ver, version):
            continue

        url = helpers.url_from_template(template, version)
        infos = helpers.tryurl(url, template)

        if not infos:
            continue

        result.append([url, version, BRUTEFORCE_HANDLER_NAME,
                       BRUTEFORCE_CONFIDENCE])

        # Too many hits means the server answers 200 to anything.
        if len(result) > CONFIG['brute-force-false-watermark']:
            output.einfo(
                "Broken server detected ! Skipping brute force."
            )
            return []

        if CONFIG["brute-force-recursive"]:
            # Derive further candidates from each confirmed version.
            for v in helpers.gen_versions(list(components),
                                          CONFIG["brute-force"]):
                if v not in versions and tuple(v) not in done:
                    versions.append(v)

        if CONFIG["oneshot"]:
            break

    return result
|
|
|
|
|
2012-04-28 18:16:05 +02:00
|
|
|
|
2011-08-31 15:38:32 +02:00
|
|
|
def can_handle(cpv, url):
    """The generic handler is the fallback: it accepts every package/URL
    combination unconditionally."""
    return True
|