euscan: respect robots.txt
Signed-off-by: Corentin Chary <corentincj@iksaif.net>
@@ -57,6 +57,9 @@ def scan_directory_recursive(cpv, url, steps):
     except IOError:
         return []
 
+    if not fp:
+        return []
+
     data = fp.read()
 
     results = []
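All three hunks add the same three-line guard after the URL is opened: a falsy fp now means the request was never made, and the handler returns an empty version list instead of crawling anyway. This suggests that the shared URL-opening helper (not shown in these hunks) was changed to consult robots.txt and return None for disallowed URLs. Below is a minimal sketch of such a helper, using Python 3's urllib.robotparser purely for illustration; the function name, the user-agent string, and the permissive handling of an unreachable robots.txt are assumptions, not euscan's actual code.

import urllib.parse
import urllib.request
import urllib.robotparser

# Hypothetical user-agent string; the identifier euscan really sends is not
# shown in this commit.
USER_AGENT = "euscan"

def urlopen_respecting_robots(url, timeout=10):
    """Open url only if the target site's robots.txt allows it, else return None."""
    parts = urllib.parse.urlparse(url)
    robots_url = f"{parts.scheme}://{parts.netloc}/robots.txt"

    rp = urllib.robotparser.RobotFileParser()
    rp.set_url(robots_url)

    allowed = True
    try:
        rp.read()
        allowed = rp.can_fetch(USER_AGENT, url)
    except IOError:
        # robots.txt itself could not be fetched; this sketch errs on the
        # permissive side and behaves as if the site had no robots.txt.
        pass

    if not allowed:
        return None  # callers turn this into an empty result list

    request = urllib.request.Request(url, headers={"User-Agent": USER_AGENT})
    return urllib.request.urlopen(request, timeout=timeout)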
@@ -38,6 +38,9 @@ def scan(cpv, url):
     except IOError:
         return []
 
+    if not fp:
+        return []
+
     data = fp.read()
 
     dom = xml.dom.minidom.parseString(data)
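For context, the second hunk lands in an XML-based handler's scan(). Reconstructed roughly around the visible lines, and reusing the hypothetical helper from the sketch above, the surrounding flow would look like this; only the guard and the lines shown in the hunk come from the commit itself.

import urllib.error
import xml.dom.minidom

def scan(cpv, url):
    # cpv names the package being scanned; url points at the upstream feed.
    try:
        fp = urlopen_respecting_robots(url)  # hypothetical helper, see above
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    # New with this commit: a URL disallowed by robots.txt behaves like an
    # unreachable one.
    if not fp:
        return []

    data = fp.read()

    dom = xml.dom.minidom.parseString(data)

    # Handler-specific DOM walking to extract candidate versions is omitted
    # from this sketch.
    return []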
@@ -33,6 +33,9 @@ def scan(cpv, url):
     except IOError:
         return []
 
+    if not fp:
+        return []
+
     data = fp.read()
     versions = json.loads(data)
 
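The third hunk applies the identical guard in a JSON-based handler, where the decoded payload carries the upstream version information. A compact sketch with the same assumed helper:

import json

def scan(cpv, url):
    try:
        fp = urlopen_respecting_robots(url)  # hypothetical helper, see above
    except IOError:
        return []

    # New with this commit: bail out when robots.txt forbids fetching the URL.
    if not fp:
        return []

    data = fp.read()
    versions = json.loads(data)

    # The real handler presumably normalizes and filters this structure
    # further; the sketch just hands the decoded data back.
    return versions

In every handler the net effect is the same: a URL that robots.txt disallows is handled exactly like an unreachable one, so the scan simply reports no candidate versions for it instead of ignoring the site's crawling policy.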