diff --git a/Lib/robotparser.py b/Lib/robotparser.py
index 730426f6ae7..1722863d144 100644
--- a/Lib/robotparser.py
+++ b/Lib/robotparser.py
@@ -68,7 +68,9 @@ class RobotFileParser:
     def _add_entry(self, entry):
         if "*" in entry.useragents:
             # the default entry is considered last
-            self.default_entry = entry
+            if self.default_entry is None:
+                # the first default entry wins
+                self.default_entry = entry
         else:
             self.entries.append(entry)
 
@@ -120,7 +122,7 @@ class RobotFileParser:
                         entry.rulelines.append(RuleLine(line[1], True))
                         state = 2
         if state == 2:
-            self.entries.append(entry)
+            self._add_entry(entry)
 
 
     def can_fetch(self, useragent, url):
diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py
index 04158841077..aa73ec5663d 100644
--- a/Lib/test/test_robotparser.py
+++ b/Lib/test/test_robotparser.py
@@ -213,6 +213,20 @@ bad = ['/some/path?name=value']
 
 RobotTest(14, doc, good, bad)
 
+# 15. For issue #4108 (obey first * entry)
+doc = """
+User-agent: *
+Disallow: /some/path
+
+User-agent: *
+Disallow: /another/path
+"""
+
+good = ['/another/path']
+bad = ['/some/path']
+
+RobotTest(15, doc, good, bad)
+
 
 class NetworkTestCase(unittest.TestCase):
 
diff --git a/Misc/NEWS b/Misc/NEWS
index 3326e067ffb..175e11711b9 100644
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -24,6 +24,9 @@ Core and Builtins
 Library
 -------
 
+- Issue #4108: In urllib.robotparser, if there are multiple 'User-agent: *'
+  entries, consider the first one.
+
 - Issue #8397: Raise an error when attempting to mix iteration and regular
   reads on a BZ2File object, rather than returning incorrect results.
 
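
For reference, a minimal sketch (not part of the patch) of the behaviour the change is meant to produce, mirroring the new test case 15. It assumes the Python 2 module name robotparser, matching the patched file; the agent name "somebot" is only illustrative.

import robotparser

robots_txt = """\
User-agent: *
Disallow: /some/path

User-agent: *
Disallow: /another/path
"""

parser = robotparser.RobotFileParser()
parser.parse(robots_txt.splitlines())

# With the patch, only the first 'User-agent: *' entry is kept as the
# default entry, so its rules decide what may be fetched.
print(parser.can_fetch("somebot", "/some/path"))     # expected: False
print(parser.can_fetch("somebot", "/another/path"))  # expected: True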