mirror of https://github.com/python/cpython
Fix SF bug #482171: webchecker dies on file: URLs w/o robots.txt
The cause seems to be that when a file: URL doesn't exist, urllib.urlopen() raises OSError instead of IOError. Simply add OSError to the except clauses. Not elegant, but effective. :-)
commit f0953b9dff
parent 3a8e59ebe1
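As an illustrative sketch (not code from webchecker itself) of the pattern the patch adopts: the fetch_robots helper and the example URL below are made up, and the except syntax is the Python 2 form used throughout the file. The point is simply that a file: URL whose path does not exist may surface from urllib.urlopen() as OSError rather than IOError, so the handler names both.

import urllib

def fetch_robots(url):
    # Hypothetical helper: fetch and return the body of a robots.txt URL.
    # A file: URL pointing at a missing path can raise OSError rather than
    # IOError from urllib.urlopen(), so catch both exceptions.
    try:
        return urllib.urlopen(url).read()
    except (OSError, IOError), msg:
        print "I/O error reading %s: %s" % (url, msg)
        return None

# Example: a file: URL for a robots.txt that is unlikely to exist.
fetch_robots("file:///no-such-dir/robots.txt")

In Python 3, IOError later became an alias of OSError, so catching OSError alone would cover both cases; this patch predates that change.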
@@ -335,7 +335,7 @@ class Checker:
         rp.set_url(url)
         try:
             rp.read()
-        except IOError, msg:
+        except (OSError, IOError), msg:
             self.note(1, "I/O error parsing %s: %s", url, msg)
 
     def run(self):
@@ -533,7 +533,7 @@ class Checker:
         url, fragment = url_pair
         try:
             return self.urlopener.open(url)
-        except IOError, msg:
+        except (OSError, IOError), msg:
             msg = self.sanitize(msg)
             self.note(0, "Error %s", msg)
             if self.verbose > 0: