#
# Secret Labs' Regular Expression Engine
#
# convert re-style regular expression to sre pattern
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#

"""Internal support module for sre"""

# XXX: show string offset and offending character for all errors

from sre_constants import *

SPECIAL_CHARS = ".\\[{()*+?^$|"
REPEAT_CHARS = "*+?{"

DIGITS = frozenset("0123456789")

OCTDIGITS = frozenset("01234567")
HEXDIGITS = frozenset("0123456789abcdefABCDEF")
ASCIILETTERS = frozenset("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")

WHITESPACE = frozenset(" \t\n\r\v\f")

_REPEATCODES = frozenset({MIN_REPEAT, MAX_REPEAT})
_UNITCODES = frozenset({ANY, RANGE, IN, LITERAL, NOT_LITERAL, CATEGORY})

ESCAPES = {
    r"\a": (LITERAL, ord("\a")),
    r"\b": (LITERAL, ord("\b")),
    r"\f": (LITERAL, ord("\f")),
    r"\n": (LITERAL, ord("\n")),
    r"\r": (LITERAL, ord("\r")),
    r"\t": (LITERAL, ord("\t")),
    r"\v": (LITERAL, ord("\v")),
    r"\\": (LITERAL, ord("\\"))
}

CATEGORIES = {
    r"\A": (AT, AT_BEGINNING_STRING), # start of string
    r"\b": (AT, AT_BOUNDARY),
    r"\B": (AT, AT_NON_BOUNDARY),
    r"\d": (IN, [(CATEGORY, CATEGORY_DIGIT)]),
    r"\D": (IN, [(CATEGORY, CATEGORY_NOT_DIGIT)]),
    r"\s": (IN, [(CATEGORY, CATEGORY_SPACE)]),
    r"\S": (IN, [(CATEGORY, CATEGORY_NOT_SPACE)]),
    r"\w": (IN, [(CATEGORY, CATEGORY_WORD)]),
    r"\W": (IN, [(CATEGORY, CATEGORY_NOT_WORD)]),
    r"\Z": (AT, AT_END_STRING), # end of string
}

FLAGS = {
    # standard flags
    "i": SRE_FLAG_IGNORECASE,
    "L": SRE_FLAG_LOCALE,
    "m": SRE_FLAG_MULTILINE,
    "s": SRE_FLAG_DOTALL,
    "x": SRE_FLAG_VERBOSE,
    # extensions
    "a": SRE_FLAG_ASCII,
    "t": SRE_FLAG_TEMPLATE,
    "u": SRE_FLAG_UNICODE,
}

GLOBAL_FLAGS = (SRE_FLAG_ASCII | SRE_FLAG_LOCALE | SRE_FLAG_UNICODE |
                SRE_FLAG_DEBUG | SRE_FLAG_TEMPLATE)

class Verbose(Exception):
    pass

class Pattern:
    # master pattern object. keeps track of global attributes
    def __init__(self):
        self.flags = 0
        self.groupdict = {}
        self.groupwidths = [None] # group 0
        self.lookbehindgroups = None
    @property
    def groups(self):
        return len(self.groupwidths)
    def opengroup(self, name=None):
        gid = self.groups
        self.groupwidths.append(None)
        if self.groups > MAXGROUPS:
            raise error("too many groups")
        if name is not None:
            ogid = self.groupdict.get(name, None)
            if ogid is not None:
                raise error("redefinition of group name %r as group %d; "
                            "was group %d" % (name, gid, ogid))
            self.groupdict[name] = gid
        return gid
    def closegroup(self, gid, p):
        self.groupwidths[gid] = p.getwidth()
    def checkgroup(self, gid):
        return gid < self.groups and self.groupwidths[gid] is not None

    def checklookbehindgroup(self, gid, source):
        if self.lookbehindgroups is not None:
            if not self.checkgroup(gid):
                raise source.error('cannot refer to an open group')
            if gid >= self.lookbehindgroups:
                raise source.error('cannot refer to group defined in the same '
                                   'lookbehind subpattern')
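
# Illustrative sketch, not part of the original module: Pattern is the shared
# parser state and hands out group numbers. Group 0 is the whole match, so a
# fresh Pattern already reports one group and opengroup() returns 1, 2, ...
#
#   >>> state = Pattern()
#   >>> state.opengroup("year")
#   1
#   >>> state.groups, state.groupdict
#   (2, {'year': 1})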

class SubPattern:
    # a subpattern, in intermediate form
    def __init__(self, pattern, data=None):
        self.pattern = pattern
        if data is None:
            data = []
        self.data = data
        self.width = None

    def dump(self, level=0):
        nl = True
        seqtypes = (tuple, list)
        for op, av in self.data:
            print(level*" " + str(op), end='')
            if op is IN:
                # member sublanguage
                print()
                for op, a in av:
                    print((level+1)*" " + str(op), a)
            elif op is BRANCH:
                print()
                for i, a in enumerate(av[1]):
                    if i:
                        print(level*" " + "OR")
                    a.dump(level+1)
            elif op is GROUPREF_EXISTS:
                condgroup, item_yes, item_no = av
                print('', condgroup)
                item_yes.dump(level+1)
                if item_no:
                    print(level*" " + "ELSE")
                    item_no.dump(level+1)
            elif isinstance(av, seqtypes):
                nl = False
                for a in av:
                    if isinstance(a, SubPattern):
                        if not nl:
                            print()
                        a.dump(level+1)
                        nl = True
                    else:
                        if not nl:
                            print(' ', end='')
                        print(a, end='')
                        nl = False
                if not nl:
                    print()
            else:
                print('', av)
    def __repr__(self):
        return repr(self.data)
    def __len__(self):
        return len(self.data)
    def __delitem__(self, index):
        del self.data[index]
    def __getitem__(self, index):
        if isinstance(index, slice):
            return SubPattern(self.pattern, self.data[index])
        return self.data[index]
    def __setitem__(self, index, code):
        self.data[index] = code
    def insert(self, index, code):
        self.data.insert(index, code)
    def append(self, code):
        self.data.append(code)
    def getwidth(self):
        # determine the width (min, max) for this subpattern
        if self.width is not None:
            return self.width
        lo = hi = 0
        for op, av in self.data:
            if op is BRANCH:
                i = MAXREPEAT - 1
                j = 0
                for av in av[1]:
                    l, h = av.getwidth()
                    i = min(i, l)
                    j = max(j, h)
                lo = lo + i
                hi = hi + j
            elif op is CALL:
                i, j = av.getwidth()
                lo = lo + i
                hi = hi + j
            elif op is SUBPATTERN:
                i, j = av[-1].getwidth()
                lo = lo + i
                hi = hi + j
            elif op in _REPEATCODES:
                i, j = av[2].getwidth()
                lo = lo + i * av[0]
                hi = hi + j * av[1]
            elif op in _UNITCODES:
                lo = lo + 1
                hi = hi + 1
            elif op is GROUPREF:
                i, j = self.pattern.groupwidths[av]
                lo = lo + i
                hi = hi + j
            elif op is GROUPREF_EXISTS:
                i, j = av[1].getwidth()
                if av[2] is not None:
                    l, h = av[2].getwidth()
                    i = min(i, l)
                    j = max(j, h)
                else:
                    i = 0
                lo = lo + i
                hi = hi + j
            elif op is SUCCESS:
                break
        self.width = min(lo, MAXREPEAT - 1), min(hi, MAXREPEAT)
        return self.width
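
# Illustrative sketch, not part of the original module: getwidth() computes the
# (min, max) number of characters a subpattern can match, capped at MAXREPEAT.
#
#   >>> parse(r"(ab|a)c").getwidth()
#   (2, 3)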

class Tokenizer:
    def __init__(self, string):
        self.istext = isinstance(string, str)
        self.string = string
        if not self.istext:
            string = str(string, 'latin1')
        self.decoded_string = string
        self.index = 0
        self.next = None
        self.__next()
    def __next(self):
        index = self.index
        try:
            char = self.decoded_string[index]
        except IndexError:
            self.next = None
            return
        if char == "\\":
            index += 1
            try:
                char += self.decoded_string[index]
            except IndexError:
                raise error("bad escape (end of pattern)",
                            self.string, len(self.string) - 1) from None
        self.index = index + 1
        self.next = char
    def match(self, char):
        if char == self.next:
            self.__next()
            return True
        return False
    def get(self):
        this = self.next
        self.__next()
        return this
    def getwhile(self, n, charset):
        result = ''
        for _ in range(n):
            c = self.next
            if c not in charset:
                break
            result += c
            self.__next()
        return result
    def getuntil(self, terminator):
        result = ''
        while True:
            c = self.next
            self.__next()
            if c is None:
                if not result:
                    raise self.error("missing group name")
                raise self.error("missing %s, unterminated name" % terminator,
                                 len(result))
            if c == terminator:
                if not result:
                    raise self.error("missing group name", 1)
                break
            result += c
        return result
    @property
    def pos(self):
        return self.index - len(self.next or '')
    def tell(self):
        return self.index - len(self.next or '')
    def seek(self, index):
        self.index = index
        self.__next()

    def error(self, msg, offset=0):
        return error(msg, self.string, self.tell() - offset)
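
# Illustrative sketch, not part of the original module: the Tokenizer yields one
# token per call, and a backslash escape counts as a single two-character token.
#
#   >>> t = Tokenizer(r"a\d+")
#   >>> t.get(), t.get(), t.get(), t.get()
#   ('a', '\\d', '+', None)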

def _class_escape(source, escape):
    # handle escape code inside character class
    code = ESCAPES.get(escape)
    if code:
        return code
    code = CATEGORIES.get(escape)
    if code and code[0] is IN:
        return code
    try:
        c = escape[1:2]
        if c == "x":
            # hexadecimal escape (exactly two digits)
            escape += source.getwhile(2, HEXDIGITS)
            if len(escape) != 4:
                raise source.error("incomplete escape %s" % escape, len(escape))
            return LITERAL, int(escape[2:], 16)
        elif c == "u" and source.istext:
            # unicode escape (exactly four digits)
            escape += source.getwhile(4, HEXDIGITS)
            if len(escape) != 6:
                raise source.error("incomplete escape %s" % escape, len(escape))
            return LITERAL, int(escape[2:], 16)
        elif c == "U" and source.istext:
            # unicode escape (exactly eight digits)
            escape += source.getwhile(8, HEXDIGITS)
            if len(escape) != 10:
                raise source.error("incomplete escape %s" % escape, len(escape))
            c = int(escape[2:], 16)
            chr(c) # raise ValueError for invalid code
            return LITERAL, c
        elif c in OCTDIGITS:
            # octal escape (up to three digits)
            escape += source.getwhile(2, OCTDIGITS)
            c = int(escape[1:], 8)
            if c > 0o377:
                raise source.error('octal escape value %s outside of '
                                   'range 0-0o377' % escape, len(escape))
            return LITERAL, c
        elif c in DIGITS:
            raise ValueError
        if len(escape) == 2:
            if c in ASCIILETTERS:
                raise source.error('bad escape %s' % escape, len(escape))
            return LITERAL, ord(escape[1])
    except ValueError:
        pass
    raise source.error("bad escape %s" % escape, len(escape))

def _escape(source, escape, state):
    # handle escape code in expression
    code = CATEGORIES.get(escape)
    if code:
        return code
    code = ESCAPES.get(escape)
    if code:
        return code
    try:
        c = escape[1:2]
        if c == "x":
            # hexadecimal escape
            escape += source.getwhile(2, HEXDIGITS)
            if len(escape) != 4:
                raise source.error("incomplete escape %s" % escape, len(escape))
            return LITERAL, int(escape[2:], 16)
        elif c == "u" and source.istext:
            # unicode escape (exactly four digits)
            escape += source.getwhile(4, HEXDIGITS)
            if len(escape) != 6:
                raise source.error("incomplete escape %s" % escape, len(escape))
            return LITERAL, int(escape[2:], 16)
        elif c == "U" and source.istext:
            # unicode escape (exactly eight digits)
            escape += source.getwhile(8, HEXDIGITS)
            if len(escape) != 10:
                raise source.error("incomplete escape %s" % escape, len(escape))
            c = int(escape[2:], 16)
            chr(c) # raise ValueError for invalid code
            return LITERAL, c
        elif c == "0":
            # octal escape
            escape += source.getwhile(2, OCTDIGITS)
            return LITERAL, int(escape[1:], 8)
        elif c in DIGITS:
            # octal escape *or* decimal group reference (sigh)
            if source.next in DIGITS:
                escape += source.get()
                if (escape[1] in OCTDIGITS and escape[2] in OCTDIGITS and
                    source.next in OCTDIGITS):
                    # got three octal digits; this is an octal escape
                    escape += source.get()
                    c = int(escape[1:], 8)
                    if c > 0o377:
                        raise source.error('octal escape value %s outside of '
                                           'range 0-0o377' % escape,
                                           len(escape))
                    return LITERAL, c
            # not an octal escape, so this is a group reference
            group = int(escape[1:])
            if group < state.groups:
                if not state.checkgroup(group):
                    raise source.error("cannot refer to an open group",
                                       len(escape))
                state.checklookbehindgroup(group, source)
                return GROUPREF, group
            raise source.error("invalid group reference %d" % group, len(escape) - 1)
        if len(escape) == 2:
            if c in ASCIILETTERS:
                raise source.error("bad escape %s" % escape, len(escape))
            return LITERAL, ord(escape[1])
    except ValueError:
        pass
    raise source.error("bad escape %s" % escape, len(escape))
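
# Illustrative sketch, not part of the original module: both escape helpers map
# one escape token (plus any further digits pulled from the source) to a single
# (opcode, argument) pair.
#
#   >>> _class_escape(Tokenizer("41]"), r"\x")
#   (LITERAL, 65)
#   >>> _escape(Tokenizer(""), r"\d", None)
#   (IN, [(CATEGORY, CATEGORY_DIGIT)])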

def _uniq(items):
    if len(set(items)) == len(items):
        return items
    newitems = []
    for item in items:
        if item not in newitems:
            newitems.append(item)
    return newitems

def _parse_sub(source, state, verbose, nested):
    # parse an alternation: a|b|c

    items = []
    itemsappend = items.append
    sourcematch = source.match
    start = source.tell()
    while True:
        itemsappend(_parse(source, state, verbose, nested + 1,
                           not nested and not items))
        if not sourcematch("|"):
            break

    if len(items) == 1:
        return items[0]

    subpattern = SubPattern(state)

    # check if all items share a common prefix
    while True:
        prefix = None
        for item in items:
            if not item:
                break
            if prefix is None:
                prefix = item[0]
            elif item[0] != prefix:
                break
        else:
            # all subitems start with a common "prefix".
            # move it out of the branch
            for item in items:
                del item[0]
            subpattern.append(prefix)
            continue # check next one
        break

    # check if the branch can be replaced by a character set
    set = []
    for item in items:
        if len(item) != 1:
            break
        op, av = item[0]
        if op is LITERAL:
            set.append((op, av))
        elif op is IN and av[0][0] is not NEGATE:
            set.extend(av)
        else:
            break
    else:
        # we can store this as a character set instead of a
        # branch (the compiler may optimize this even more)
        subpattern.append((IN, _uniq(set)))
        return subpattern

    subpattern.append((BRANCH, (None, items)))
    return subpattern
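
# Illustrative sketch, not part of the original module: an alternation of
# single characters is folded into one character set, so no BRANCH node is
# emitted for a pattern like "a|b|c".
#
#   >>> parse("a|b|c")
#   [(IN, [(LITERAL, 97), (LITERAL, 98), (LITERAL, 99)])]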

def _parse(source, state, verbose, nested, first=False):
    # parse a simple pattern
    subpattern = SubPattern(state)

    # precompute constants into local variables
    subpatternappend = subpattern.append
    sourceget = source.get
    sourcematch = source.match
    _len = len
    _ord = ord

    while True:

        this = source.next
        if this is None:
            break # end of pattern
        if this in "|)":
            break # end of subpattern
        sourceget()

        if verbose:
            # skip whitespace and comments
            if this in WHITESPACE:
                continue
            if this == "#":
                while True:
                    this = sourceget()
                    if this is None or this == "\n":
                        break
                continue

        if this[0] == "\\":
            code = _escape(source, this, state)
            subpatternappend(code)

        elif this not in SPECIAL_CHARS:
            subpatternappend((LITERAL, _ord(this)))

        elif this == "[":
            here = source.tell() - 1
            # character set
            set = []
            setappend = set.append
            ## if sourcematch(":"):
            ##     pass # handle character classes
            negate = sourcematch("^")
            # check remaining characters
            while True:
                this = sourceget()
                if this is None:
                    raise source.error("unterminated character set",
                                       source.tell() - here)
                if this == "]" and set:
                    break
                elif this[0] == "\\":
                    code1 = _class_escape(source, this)
                else:
                    code1 = LITERAL, _ord(this)
                if sourcematch("-"):
                    # potential range
                    that = sourceget()
                    if that is None:
                        raise source.error("unterminated character set",
                                           source.tell() - here)
                    if that == "]":
                        if code1[0] is IN:
                            code1 = code1[1][0]
                        setappend(code1)
                        setappend((LITERAL, _ord("-")))
                        break
                    if that[0] == "\\":
                        code2 = _class_escape(source, that)
                    else:
                        code2 = LITERAL, _ord(that)
                    if code1[0] != LITERAL or code2[0] != LITERAL:
                        msg = "bad character range %s-%s" % (this, that)
                        raise source.error(msg, len(this) + 1 + len(that))
                    lo = code1[1]
                    hi = code2[1]
                    if hi < lo:
                        msg = "bad character range %s-%s" % (this, that)
                        raise source.error(msg, len(this) + 1 + len(that))
                    setappend((RANGE, (lo, hi)))
                else:
                    if code1[0] is IN:
                        code1 = code1[1][0]
                    setappend(code1)

            set = _uniq(set)
            # XXX: <fl> should move set optimization to compiler!
            if _len(set) == 1 and set[0][0] is LITERAL:
                # optimization
                if negate:
                    subpatternappend((NOT_LITERAL, set[0][1]))
                else:
                    subpatternappend(set[0])
            else:
                if negate:
                    set.insert(0, (NEGATE, None))
                # charmap optimization can't be added here because
                # global flags still are not known
                subpatternappend((IN, set))

        elif this in REPEAT_CHARS:
            # repeat previous item
            here = source.tell()
            if this == "?":
                min, max = 0, 1
            elif this == "*":
                min, max = 0, MAXREPEAT

            elif this == "+":
                min, max = 1, MAXREPEAT
            elif this == "{":
                if source.next == "}":
                    subpatternappend((LITERAL, _ord(this)))
                    continue

                min, max = 0, MAXREPEAT
                lo = hi = ""
                while source.next in DIGITS:
                    lo += sourceget()
                if sourcematch(","):
                    while source.next in DIGITS:
                        hi += sourceget()
                else:
                    hi = lo
                if not sourcematch("}"):
                    subpatternappend((LITERAL, _ord(this)))
                    source.seek(here)
                    continue

                if lo:
                    min = int(lo)
                    if min >= MAXREPEAT:
                        raise OverflowError("the repetition number is too large")
                if hi:
                    max = int(hi)
                    if max >= MAXREPEAT:
                        raise OverflowError("the repetition number is too large")
                    if max < min:
                        raise source.error("min repeat greater than max repeat",
                                           source.tell() - here)
            else:
                raise AssertionError("unsupported quantifier %r" % (char,))
            # figure out which item to repeat
            if subpattern:
                item = subpattern[-1:]
            else:
                item = None
            if not item or item[0][0] is AT:
                raise source.error("nothing to repeat",
                                   source.tell() - here + len(this))
            if item[0][0] in _REPEATCODES:
                raise source.error("multiple repeat",
                                   source.tell() - here + len(this))
            if item[0][0] is SUBPATTERN:
                group, add_flags, del_flags, p = item[0][1]
                if group is None and not add_flags and not del_flags:
                    item = p
            if sourcematch("?"):
                subpattern[-1] = (MIN_REPEAT, (min, max, item))
            else:
                subpattern[-1] = (MAX_REPEAT, (min, max, item))

        elif this == ".":
            subpatternappend((ANY, None))

        elif this == "(":
            start = source.tell() - 1
            group = True
            name = None
            add_flags = 0
            del_flags = 0
            if sourcematch("?"):
                # options
                char = sourceget()
                if char is None:
                    raise source.error("unexpected end of pattern")
                if char == "P":
                    # python extensions
                    if sourcematch("<"):
                        # named group: skip forward to end of name
                        name = source.getuntil(">")
                        if not name.isidentifier():
                            msg = "bad character in group name %r" % name
                            raise source.error(msg, len(name) + 1)
                    elif sourcematch("="):
                        # named backreference
                        name = source.getuntil(")")
                        if not name.isidentifier():
                            msg = "bad character in group name %r" % name
                            raise source.error(msg, len(name) + 1)
                        gid = state.groupdict.get(name)
                        if gid is None:
                            msg = "unknown group name %r" % name
                            raise source.error(msg, len(name) + 1)
                        if not state.checkgroup(gid):
                            raise source.error("cannot refer to an open group",
                                               len(name) + 1)
                        state.checklookbehindgroup(gid, source)
                        subpatternappend((GROUPREF, gid))
                        continue

                    else:
                        char = sourceget()
                        if char is None:
                            raise source.error("unexpected end of pattern")
                        raise source.error("unknown extension ?P" + char,
                                           len(char) + 2)
                elif char == ":":
                    # non-capturing group
                    group = None
                elif char == "#":
                    # comment
                    while True:
                        if source.next is None:
                            raise source.error("missing ), unterminated comment",
                                               source.tell() - start)
                        if sourceget() == ")":
                            break
                    continue

                elif char in "=!<":
                    # lookahead assertions
                    dir = 1
                    if char == "<":
                        char = sourceget()
                        if char is None:
                            raise source.error("unexpected end of pattern")
                        if char not in "=!":
                            raise source.error("unknown extension ?<" + char,
                                               len(char) + 2)
                        dir = -1 # lookbehind
                        lookbehindgroups = state.lookbehindgroups
                        if lookbehindgroups is None:
                            state.lookbehindgroups = state.groups
                    p = _parse_sub(source, state, verbose, nested + 1)
                    if dir < 0:
                        if lookbehindgroups is None:
                            state.lookbehindgroups = None
                    if not sourcematch(")"):
                        raise source.error("missing ), unterminated subpattern",
                                           source.tell() - start)
                    if char == "=":
                        subpatternappend((ASSERT, (dir, p)))
                    else:
                        subpatternappend((ASSERT_NOT, (dir, p)))
                    continue

                elif char == "(":
                    # conditional backreference group
                    condname = source.getuntil(")")
                    if condname.isidentifier():
                        condgroup = state.groupdict.get(condname)
                        if condgroup is None:
                            msg = "unknown group name %r" % condname
                            raise source.error(msg, len(condname) + 1)
                    else:
                        try:
                            condgroup = int(condname)
                            if condgroup < 0:
                                raise ValueError
                        except ValueError:
                            msg = "bad character in group name %r" % condname
                            raise source.error(msg, len(condname) + 1) from None
                        if not condgroup:
                            raise source.error("bad group number",
                                               len(condname) + 1)
                        if condgroup >= MAXGROUPS:
                            msg = "invalid group reference %d" % condgroup
                            raise source.error(msg, len(condname) + 1)
                    state.checklookbehindgroup(condgroup, source)
                    item_yes = _parse(source, state, verbose, nested + 1)
                    if source.match("|"):
                        item_no = _parse(source, state, verbose, nested + 1)
                        if source.next == "|":
                            raise source.error("conditional backref with more than two branches")
                    else:
                        item_no = None
                    if not source.match(")"):
                        raise source.error("missing ), unterminated subpattern",
                                           source.tell() - start)
                    subpatternappend((GROUPREF_EXISTS, (condgroup, item_yes, item_no)))
                    continue

                elif char in FLAGS or char == "-":
                    # flags
                    flags = _parse_flags(source, state, char)
                    if flags is None: # global flags
                        if not first or subpattern:
                            import warnings
                            warnings.warn(
                                'Flags not at the start of the expression %r%s' % (
                                    source.string[:20], # truncate long regexes
                                    ' (truncated)' if len(source.string) > 20 else '',
                                ),
                                DeprecationWarning, stacklevel=nested + 6
                            )
                        if (state.flags & SRE_FLAG_VERBOSE) and not verbose:
                            raise Verbose
                        continue

                    add_flags, del_flags = flags
                    group = None
                else:
                    raise source.error("unknown extension ?" + char,
                                       len(char) + 1)

            # parse group contents
            if group is not None:
                try:
                    group = state.opengroup(name)
                except error as err:
                    raise source.error(err.msg, len(name) + 1) from None
            sub_verbose = ((verbose or (add_flags & SRE_FLAG_VERBOSE)) and
                           not (del_flags & SRE_FLAG_VERBOSE))
            p = _parse_sub(source, state, sub_verbose, nested + 1)
            if not source.match(")"):
                raise source.error("missing ), unterminated subpattern",
                                   source.tell() - start)
            if group is not None:
                state.closegroup(group, p)
            subpatternappend((SUBPATTERN, (group, add_flags, del_flags, p)))

        elif this == "^":
            subpatternappend((AT, AT_BEGINNING))

        elif this == "$":
            subpatternappend((AT, AT_END))

        else:
            raise AssertionError("unsupported special character %r" % (char,))

    # unpack non-capturing groups
    for i in range(len(subpattern))[::-1]:
        op, av = subpattern[i]
        if op is SUBPATTERN:
            group, add_flags, del_flags, p = av
            if group is None and not add_flags and not del_flags:
                subpattern[i: i+1] = p

    return subpattern
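
# Illustrative sketch, not part of the original module: a capturing group is
# parsed into a SUBPATTERN node carrying (group number, added flags, removed
# flags, body).
#
#   >>> parse(r"(?P<num>\d+)")
#   [(SUBPATTERN, (1, 0, 0, [(MAX_REPEAT, (1, MAXREPEAT, [(IN, [(CATEGORY, CATEGORY_DIGIT)])]))]))]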

def _parse_flags(source, state, char):
    sourceget = source.get
    add_flags = 0
    del_flags = 0
    if char != "-":
        while True:
            add_flags |= FLAGS[char]
            char = sourceget()
            if char is None:
                raise source.error("missing -, : or )")
            if char in ")-:":
                break
            if char not in FLAGS:
                msg = "unknown flag" if char.isalpha() else "missing -, : or )"
                raise source.error(msg, len(char))
    if char == ")":
        state.flags |= add_flags
        return None
    if add_flags & GLOBAL_FLAGS:
        raise source.error("bad inline flags: cannot turn on global flag", 1)
    if char == "-":
        char = sourceget()
        if char is None:
            raise source.error("missing flag")
        if char not in FLAGS:
            msg = "unknown flag" if char.isalpha() else "missing flag"
            raise source.error(msg, len(char))
        while True:
            del_flags |= FLAGS[char]
            char = sourceget()
            if char is None:
                raise source.error("missing :")
            if char == ":":
                break
            if char not in FLAGS:
                msg = "unknown flag" if char.isalpha() else "missing :"
                raise source.error(msg, len(char))
    assert char == ":"
    if del_flags & GLOBAL_FLAGS:
        raise source.error("bad inline flags: cannot turn off global flag", 1)
    if add_flags & del_flags:
        raise source.error("bad inline flags: flag turned on and off", 1)
    return add_flags, del_flags
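
# Illustrative sketch, not part of the original module: _parse_flags() is
# invoked right after "(?" with the first flag character already consumed, and
# returns the (added, removed) scoped flags, or None for a global flag group.
#
#   >>> src = Tokenizer("i-m:x)"); src.get()
#   'i'
#   >>> _parse_flags(src, Pattern(), "i") == (SRE_FLAG_IGNORECASE, SRE_FLAG_MULTILINE)
#   True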

def fix_flags(src, flags):
    # Check and fix flags according to the type of pattern (str or bytes)
    if isinstance(src, str):
        if flags & SRE_FLAG_LOCALE:
            raise ValueError("cannot use LOCALE flag with a str pattern")
        if not flags & SRE_FLAG_ASCII:
            flags |= SRE_FLAG_UNICODE
        elif flags & SRE_FLAG_UNICODE:
            raise ValueError("ASCII and UNICODE flags are incompatible")
    else:
        if flags & SRE_FLAG_UNICODE:
            raise ValueError("cannot use UNICODE flag with a bytes pattern")
        if flags & SRE_FLAG_LOCALE and flags & SRE_FLAG_ASCII:
            raise ValueError("ASCII and LOCALE flags are incompatible")
    return flags
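
# Illustrative sketch, not part of the original module: for str patterns the
# UNICODE flag is implied unless ASCII was requested explicitly; bytes patterns
# are left as given.
#
#   >>> fix_flags("x", 0) == SRE_FLAG_UNICODE
#   True
#   >>> fix_flags(b"x", 0)
#   0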

def parse(str, flags=0, pattern=None):
    # parse 're' pattern into list of (opcode, argument) tuples

    source = Tokenizer(str)

    if pattern is None:
        pattern = Pattern()
    pattern.flags = flags
    pattern.str = str

    try:
        p = _parse_sub(source, pattern, flags & SRE_FLAG_VERBOSE, 0)
    except Verbose:
        # the VERBOSE flag was switched on inside the pattern. to be
        # on the safe side, we'll parse the whole thing again...
        pattern = Pattern()
        pattern.flags = flags | SRE_FLAG_VERBOSE
        pattern.str = str
        source.seek(0)
        p = _parse_sub(source, pattern, True, 0)

    p.pattern.flags = fix_flags(str, p.pattern.flags)

    if source.next is not None:
        assert source.next == ")"
        raise source.error("unbalanced parenthesis")

    if flags & SRE_FLAG_DEBUG:
        p.dump()

    return p
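
# Illustrative sketch, not part of the original module: parse() is the entry
# point the re module drives via sre_compile; the returned SubPattern also
# carries the Pattern state with the final flags and group count.
#
#   >>> p = parse(r"a[bc]+")
#   >>> p
#   [(LITERAL, 97), (MAX_REPEAT, (1, MAXREPEAT, [(IN, [(LITERAL, 98), (LITERAL, 99)])]))]
#   >>> p.pattern.groups, bool(p.pattern.flags & SRE_FLAG_UNICODE)
#   (1, True)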

def parse_template(source, pattern):
    # parse 're' replacement string into list of literals and
    # group references
    s = Tokenizer(source)
    sget = s.get
    groups = []
    literals = []
    literal = []
    lappend = literal.append
    def addgroup(index, pos):
        if index > pattern.groups:
            raise s.error("invalid group reference %d" % index, pos)
        if literal:
            literals.append(''.join(literal))
            del literal[:]
        groups.append((len(literals), index))
        literals.append(None)
    groupindex = pattern.groupindex
    while True:
        this = sget()
        if this is None:
            break # end of replacement string
        if this[0] == "\\":
            # group
            c = this[1]
            if c == "g":
                name = ""
                if not s.match("<"):
                    raise s.error("missing <")
                name = s.getuntil(">")
                if name.isidentifier():
                    try:
                        index = groupindex[name]
                    except KeyError:
                        raise IndexError("unknown group name %r" % name)
                else:
                    try:
                        index = int(name)
                        if index < 0:
                            raise ValueError
                    except ValueError:
                        raise s.error("bad character in group name %r" % name,
                                      len(name) + 1) from None
                    if index >= MAXGROUPS:
                        raise s.error("invalid group reference %d" % index,
                                      len(name) + 1)
                addgroup(index, len(name) + 1)
            elif c == "0":
                if s.next in OCTDIGITS:
                    this += sget()
                    if s.next in OCTDIGITS:
                        this += sget()
                lappend(chr(int(this[1:], 8) & 0xff))
            elif c in DIGITS:
                isoctal = False
                if s.next in DIGITS:
                    this += sget()
                    if (c in OCTDIGITS and this[2] in OCTDIGITS and
                        s.next in OCTDIGITS):
                        this += sget()
                        isoctal = True
                        c = int(this[1:], 8)
                        if c > 0o377:
                            raise s.error('octal escape value %s outside of '
                                          'range 0-0o377' % this, len(this))
                        lappend(chr(c))
                if not isoctal:
                    addgroup(int(this[1:]), len(this) - 1)
            else:
                try:
                    this = chr(ESCAPES[this][1])
                except KeyError:
                    if c in ASCIILETTERS:
                        raise s.error('bad escape %s' % this, len(this))
                lappend(this)
        else:
            lappend(this)
    if literal:
        literals.append(''.join(literal))
    if not isinstance(source, str):
        # The tokenizer implicitly decodes bytes objects as latin-1, we must
        # therefore re-encode the final representation.
        literals = [None if s is None else s.encode('latin-1') for s in literals]
    return groups, literals
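
# Illustrative sketch, not part of the original module: parse_template() splits
# a replacement string into literal runs plus (position, group) insertion
# points; expand_template() below fills those in for a concrete match.
#
#   >>> import re
#   >>> parse_template(r"<\1>", re.compile(r"(\w+)"))
#   ([(1, 1)], ['<', None, '>'])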

def expand_template(template, match):
    g = match.group
    empty = match.string[:0]
    groups, literals = template
    literals = literals[:]
    try:
        for index, group in groups:
            literals[index] = g(group) or empty
    except IndexError:
        raise error("invalid group reference %d" % index)
    return empty.join(literals)
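
# Illustrative sketch, not part of the original module: expand_template() joins
# the literal runs with the text matched by each referenced group.
#
#   >>> import re
#   >>> m = re.match(r"(\w+)", "hello world")
#   >>> expand_template(parse_template(r"<\1>", m.re), m)
#   '<hello>'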