diff --git a/Lib/robotparser.py b/Lib/robotparser.py
index 447563fe654..730426f6ae7 100644
--- a/Lib/robotparser.py
+++ b/Lib/robotparser.py
@@ -131,7 +131,12 @@ class RobotFileParser:
             return True
         # search for given user agent matches
         # the first match counts
-        url = urllib.quote(urlparse.urlparse(urllib.unquote(url))[2]) or "/"
+        parsed_url = urlparse.urlparse(urllib.unquote(url))
+        url = urlparse.urlunparse(('', '', parsed_url.path,
+            parsed_url.params, parsed_url.query, parsed_url.fragment))
+        url = urllib.quote(url)
+        if not url:
+            url = "/"
         for entry in self.entries:
             if entry.applies_to(useragent):
                 return entry.allowance(url)
diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py
index 405d517d2e0..04158841077 100644
--- a/Lib/test/test_robotparser.py
+++ b/Lib/test/test_robotparser.py
@@ -202,6 +202,17 @@
 bad = ['/folder1/anotherfile.html']
 
 RobotTest(13, doc, good, bad, agent="googlebot")
 
+# 14. For issue #6325 (query string support)
+doc = """
+User-agent: *
+Disallow: /some/path?name=value
+"""
+
+good = ['/some/path']
+bad = ['/some/path?name=value']
+
+RobotTest(14, doc, good, bad)
+
 
 class NetworkTestCase(unittest.TestCase):
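
For reference, a minimal sketch (not part of the patch) of the behaviour the
change is meant to produce, built around the rule from test 14. The
example.com URLs are arbitrary placeholders; parse() is fed the rules directly
so the check needs no network access, and modified() is called defensively
(harmless here, and it keeps the sketch working on later versions of the
module that refuse to answer can_fetch() before the robots.txt is read):

    # Python 2 sketch of the intended behaviour; example.com is a placeholder.
    import robotparser

    rp = robotparser.RobotFileParser()
    rp.modified()   # mark the parser as checked so can_fetch() proceeds
    rp.parse("User-agent: *\nDisallow: /some/path?name=value\n".splitlines())

    # Before the patch only the path component of the request URL was compared,
    # so a rule carrying "?name=value" never matched and both calls returned True.
    print rp.can_fetch("*", "http://example.com/some/path")             # True
    print rp.can_fetch("*", "http://example.com/some/path?name=value")  # False with the patch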