2000-02-04 11:10:34 -04:00
|
|
|
"""Create portable serialized representations of Python objects.
|
1997-12-05 15:42:42 -04:00
|
|
|
|
|
|
|
See module cPickle for a (much) faster implementation.
|
|
|
|
See module copy_reg for a mechanism for registering custom picklers.
|
2003-01-27 16:16:36 -04:00
|
|
|
See module pickletools source for extensive comments.
|
1997-12-05 15:42:42 -04:00
|
|
|
|
|
|
|
Classes:
|
|
|
|
|
|
|
|
Pickler
|
|
|
|
Unpickler
|
|
|
|
|
|
|
|
Functions:
|
|
|
|
|
|
|
|
dump(object, file)
|
|
|
|
dumps(object) -> string
|
|
|
|
load(file) -> object
|
|
|
|
loads(string) -> object
|
|
|
|
|
|
|
|
Misc variables:
|
|
|
|
|
1998-02-12 23:24:48 -04:00
|
|
|
__version__
|
1997-12-05 15:42:42 -04:00
|
|
|
format_version
|
|
|
|
compatible_formats
|
|
|
|
|
1995-01-09 20:31:14 -04:00
|
|
|
"""
|
|
|
|
|
1998-09-15 17:25:57 -03:00
|
|
|
__version__ = "$Revision$" # Code version
|
1995-01-09 20:31:14 -04:00
|
|
|
|
|
|
|
from types import *
|
2003-01-28 18:29:13 -04:00
|
|
|
from copy_reg import dispatch_table, _reconstructor
|
2003-02-03 21:54:49 -04:00
|
|
|
from copy_reg import _extension_registry, _inverted_registry, _extension_cache
|
1998-10-22 17:15:36 -03:00
|
|
|
import marshal
|
|
|
|
import sys
|
|
|
|
import struct
|
2001-02-17 23:10:09 -04:00
|
|
|
import re
|
2003-01-28 12:34:19 -04:00
|
|
|
import warnings
|
1995-01-09 20:31:14 -04:00
|
|
|
|
2001-02-07 19:14:30 -04:00
|
|
|
__all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler",
|
|
|
|
"Unpickler", "dump", "dumps", "load", "loads"]
|
|
|
|
|
2003-01-28 20:56:17 -04:00
|
|
|
# These are purely informational; no code uses these.
|
2003-01-27 18:47:53 -04:00
|
|
|
format_version = "2.0" # File format version we write
|
|
|
|
compatible_formats = ["1.0", # Original protocol 0
|
2003-01-28 12:34:19 -04:00
|
|
|
"1.1", # Protocol 0 with INST added
|
2003-01-27 18:47:53 -04:00
|
|
|
"1.2", # Original protocol 1
|
|
|
|
"1.3", # Protocol 1 with BINFLOAT added
|
|
|
|
"2.0", # Protocol 2
|
|
|
|
] # Old format versions we can read
|
1995-03-14 11:09:05 -04:00
|
|
|
|
2003-01-27 23:17:21 -04:00
|
|
|
# Why use struct.pack() for pickling but marshal.loads() for
|
2003-01-28 20:56:17 -04:00
|
|
|
# unpickling? struct.pack() is 40% faster than marshal.dumps(), but
|
2003-01-27 23:17:21 -04:00
|
|
|
# marshal.loads() is twice as fast as struct.unpack()!
|
1997-04-09 14:32:51 -03:00
|
|
|
mloads = marshal.loads
|
1995-03-09 10:08:35 -04:00
|
|
|
|
2002-05-29 13:18:42 -03:00
|
|
|
class PickleError(Exception):
|
2002-05-30 09:12:04 -03:00
|
|
|
"""A common base class for the other pickling exceptions."""
|
2002-05-29 13:18:42 -03:00
|
|
|
pass
|
|
|
|
|
|
|
|
class PicklingError(PickleError):
    """This exception is raised when an unpicklable object is passed to the
    dump() method.

    """
    pass
|
|
|
|
|
|
|
|
class UnpicklingError(PickleError):
    """This exception is raised when there is a problem unpickling an object,
    such as a security violation.

    Note that other exceptions may also be raised during unpickling, including
    (but not necessarily limited to) AttributeError, EOFError, ImportError,
    and IndexError.

    """
    pass
|
1997-04-09 14:32:51 -03:00
|
|
|
|
2003-01-28 20:56:17 -04:00
|
|
|
# An instance of _Stop is raised by Unpickler.load_stop() in response to
|
|
|
|
# the STOP opcode, passing the object that is the result of unpickling.
|
2000-12-13 14:11:56 -04:00
|
|
|
class _Stop(Exception):
|
|
|
|
def __init__(self, value):
|
|
|
|
self.value = value
|
|
|
|
|
2003-01-28 13:55:05 -04:00
|
|
|
# Jython has PyStringMap; it's a dict subclass with string keys
|
1998-05-27 19:38:22 -03:00
|
|
|
try:
|
|
|
|
from org.python.core import PyStringMap
|
|
|
|
except ImportError:
|
|
|
|
PyStringMap = None
|
|
|
|
|
2003-01-28 13:55:05 -04:00
|
|
|
# UnicodeType may or may not be exported (normally imported from types)
|
2001-09-21 16:22:34 -03:00
|
|
|
try:
|
|
|
|
UnicodeType
|
|
|
|
except NameError:
|
|
|
|
UnicodeType = None
|
|
|
|
|
2003-01-27 16:16:36 -04:00
|
|
|
# Pickle opcodes.  See pickletools.py for extensive docs.  The listing
# here is in kind-of alphabetical order of 1-character pickle code.
# pickletools groups them by purpose.

MARK            = '('   # push special markobject on stack
STOP            = '.'   # every pickle ends with STOP
POP             = '0'   # discard topmost stack item
POP_MARK        = '1'   # discard stack top through topmost markobject
DUP             = '2'   # duplicate top stack item
FLOAT           = 'F'   # push float object; decimal string argument
INT             = 'I'   # push integer or bool; decimal string argument
BININT          = 'J'   # push four-byte signed int
BININT1         = 'K'   # push 1-byte unsigned int
LONG            = 'L'   # push long; decimal string argument
BININT2         = 'M'   # push 2-byte unsigned int
NONE            = 'N'   # push None
PERSID          = 'P'   # push persistent object; id is taken from string arg
BINPERSID       = 'Q'   #  "       "         "  ;  "  "   "     "  stack
REDUCE          = 'R'   # apply callable to argtuple, both on stack
STRING          = 'S'   # push string; NL-terminated string argument
BINSTRING       = 'T'   # push string; counted binary string argument
SHORT_BINSTRING = 'U'   #  "     "   ;    "      "       "      " < 256 bytes
UNICODE         = 'V'   # push Unicode string; raw-unicode-escaped'd argument
BINUNICODE      = 'X'   #   "     "       "  ; counted UTF-8 string argument
APPEND          = 'a'   # append stack top to list below it
BUILD           = 'b'   # call __setstate__ or __dict__.update()
GLOBAL          = 'c'   # push self.find_class(modname, name); 2 string args
DICT            = 'd'   # build a dict from stack items
EMPTY_DICT      = '}'   # push empty dict
APPENDS         = 'e'   # extend list on stack by topmost stack slice
GET             = 'g'   # push item from memo on stack; index is string arg
BINGET          = 'h'   #   "    "    "    "   "   "  ;   "    " 1-byte arg
INST            = 'i'   # build & push class instance
LONG_BINGET     = 'j'   # push item from memo on stack; index is 4-byte arg
LIST            = 'l'   # build list from topmost stack items
EMPTY_LIST      = ']'   # push empty list
OBJ             = 'o'   # build & push class instance
PUT             = 'p'   # store stack top in memo; index is string arg
BINPUT          = 'q'   #   "     "    "   "   " ;   "    " 1-byte arg
LONG_BINPUT     = 'r'   #   "     "    "   "   " ;   "    " 4-byte arg
SETITEM         = 's'   # add key+value pair to dict
TUPLE           = 't'   # build tuple from topmost stack items
EMPTY_TUPLE     = ')'   # push empty tuple
SETITEMS        = 'u'   # modify dict by adding topmost key+value pairs
BINFLOAT        = 'G'   # push float; arg is 8-byte float encoding

TRUE            = 'I01\n'  # not an opcode; see INT docs in pickletools.py
FALSE           = 'I00\n'  # not an opcode; see INT docs in pickletools.py

# Protocol 2

PROTO           = '\x80'  # identify pickle protocol
NEWOBJ          = '\x81'  # build object by applying cls.__new__ to argtuple
EXT1            = '\x82'  # push object from extension registry; 1-byte index
EXT2            = '\x83'  # ditto, but 2-byte index
EXT4            = '\x84'  # ditto, but 4-byte index
TUPLE1          = '\x85'  # build 1-tuple from stack top
TUPLE2          = '\x86'  # build 2-tuple from two topmost stack items
TUPLE3          = '\x87'  # build 3-tuple from three topmost stack items
NEWTRUE         = '\x88'  # push True
NEWFALSE        = '\x89'  # push False
LONG1           = '\x8a'  # push long from < 256 bytes
LONG4           = '\x8b'  # push really big long

# Maps a tuple length 0..3 to the opcode that builds it in one step.
_tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3]
|
|
|
|
|
1995-01-09 20:31:14 -04:00
|
|
|
|
2001-02-17 23:10:09 -04:00
|
|
|
__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$",x)])
|
2002-02-11 14:12:06 -04:00
|
|
|
del x
|
2001-02-17 23:10:09 -04:00
|
|
|
|
2003-01-28 11:19:53 -04:00
|
|
|
|
|
|
|
# Pickling machinery
|
|
|
|
|
1995-01-09 20:31:14 -04:00
|
|
|
class Pickler:
|
|
|
|
|
2003-02-03 12:59:48 -04:00
|
|
|
def __init__(self, file, proto=None, bin=None):
|
2002-05-29 13:18:42 -03:00
|
|
|
"""This takes a file-like object for writing a pickle data stream.
|
|
|
|
|
2003-01-27 18:47:53 -04:00
|
|
|
The optional proto argument tells the pickler to use the given
|
|
|
|
protocol; supported protocols are 0, 1, 2. The default
|
2003-01-31 15:42:31 -04:00
|
|
|
protocol is 0, to be backwards compatible. (Protocol 0 is the
|
|
|
|
only protocol that can be written to a file opened in text
|
2003-02-01 12:45:06 -04:00
|
|
|
mode and read back successfully. When using a protocol higher
|
|
|
|
than 0, make sure the file is opened in binary mode, both when
|
|
|
|
pickling and unpickling.)
|
2003-01-27 18:47:53 -04:00
|
|
|
|
|
|
|
Protocol 1 is more efficient than protocol 0; protocol 2 is
|
2003-01-31 15:42:31 -04:00
|
|
|
more efficient than protocol 1.
|
2003-01-27 18:47:53 -04:00
|
|
|
|
2003-01-31 15:42:31 -04:00
|
|
|
Specifying a negative protocol version selects the highest
|
2003-02-01 12:45:06 -04:00
|
|
|
protocol version supported. The higher the protocol used, the
|
|
|
|
more recent the version of Python needed to read the pickle
|
|
|
|
produced.
|
2002-05-29 13:18:42 -03:00
|
|
|
|
|
|
|
The file parameter must have a write() method that accepts a single
|
|
|
|
string argument. It can thus be an open file object, a StringIO
|
|
|
|
object, or any other custom object that meets this interface.
|
|
|
|
|
|
|
|
"""
|
2003-02-03 12:59:48 -04:00
|
|
|
if proto is not None and bin is not None:
|
|
|
|
raise ValueError, "can't specify both 'proto' and 'bin' arguments"
|
|
|
|
if bin is not None:
|
|
|
|
warnings.warn("The 'bin' argument to Pickler() is deprecated",
|
|
|
|
PendingDeprecationWarning)
|
|
|
|
proto = bin
|
|
|
|
if proto is None:
|
|
|
|
proto = 0
|
2003-01-31 15:42:31 -04:00
|
|
|
if proto < 0:
|
|
|
|
proto = 2
|
|
|
|
elif proto not in (0, 1, 2):
|
2003-01-27 23:49:52 -04:00
|
|
|
raise ValueError, "pickle protocol must be 0, 1 or 2"
|
1997-04-09 14:32:51 -03:00
|
|
|
self.write = file.write
|
|
|
|
self.memo = {}
|
2003-01-28 11:19:53 -04:00
|
|
|
self.proto = int(proto)
|
2003-01-27 18:47:53 -04:00
|
|
|
self.bin = proto >= 1
|
2003-01-29 13:58:45 -04:00
|
|
|
self.fast = 0
|
1997-04-09 14:32:51 -03:00
|
|
|
|
2002-05-01 17:33:53 -03:00
|
|
|
def clear_memo(self):
|
2002-05-29 13:18:42 -03:00
|
|
|
"""Clears the pickler's "memo".
|
|
|
|
|
|
|
|
The memo is the data structure that remembers which objects the
|
2003-01-27 20:23:36 -04:00
|
|
|
pickler has already seen, so that shared or recursive objects are
|
|
|
|
pickled by reference and not by value. This method is useful when
|
|
|
|
re-using picklers.
|
2002-05-29 13:18:42 -03:00
|
|
|
|
|
|
|
"""
|
2002-05-01 17:33:53 -03:00
|
|
|
self.memo.clear()
|
|
|
|
|
2003-01-28 11:10:22 -04:00
|
|
|
def dump(self, obj):
|
2003-02-01 12:45:06 -04:00
|
|
|
"""Write a pickled representation of obj to the open file."""
|
2003-01-27 23:49:52 -04:00
|
|
|
if self.proto >= 2:
|
|
|
|
self.write(PROTO + chr(self.proto))
|
2003-01-28 11:10:22 -04:00
|
|
|
self.save(obj)
|
1997-04-09 14:32:51 -03:00
|
|
|
self.write(STOP)
|
|
|
|
|
2003-01-24 15:29:52 -04:00
|
|
|
def memoize(self, obj):
|
|
|
|
"""Store an object in the memo."""
|
|
|
|
|
2003-01-27 17:22:10 -04:00
|
|
|
# The Pickler memo is a dictionary mapping object ids to 2-tuples
|
|
|
|
# that contain the Unpickler memo key and the object being memoized.
|
|
|
|
# The memo key is written to the pickle and will become
|
2003-01-24 15:29:52 -04:00
|
|
|
# the key in the Unpickler's memo. The object is stored in the
|
2003-01-27 17:22:10 -04:00
|
|
|
# Pickler memo so that transient objects are kept alive during
|
|
|
|
# pickling.
|
|
|
|
|
|
|
|
# The use of the Unpickler memo length as the memo key is just a
|
|
|
|
# convention. The only requirement is that the memo values be unique.
|
|
|
|
# But there appears no advantage to any other scheme, and this
|
2003-01-27 20:24:43 -04:00
|
|
|
# scheme allows the Unpickler memo to be implemented as a plain (but
|
2003-01-27 17:22:10 -04:00
|
|
|
# growable) array, indexed by memo key.
|
2003-01-29 13:58:45 -04:00
|
|
|
if self.fast:
|
|
|
|
return
|
2003-01-30 02:37:41 -04:00
|
|
|
assert id(obj) not in self.memo
|
2003-01-24 15:29:52 -04:00
|
|
|
memo_len = len(self.memo)
|
|
|
|
self.write(self.put(memo_len))
|
2003-01-27 21:00:38 -04:00
|
|
|
self.memo[id(obj)] = memo_len, obj
|
2003-01-24 15:29:52 -04:00
|
|
|
|
2003-01-27 17:25:41 -04:00
|
|
|
# Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i.
|
2003-01-27 23:03:08 -04:00
|
|
|
def put(self, i, pack=struct.pack):
|
2001-04-09 23:48:53 -03:00
|
|
|
if self.bin:
|
|
|
|
if i < 256:
|
2003-01-27 23:03:08 -04:00
|
|
|
return BINPUT + chr(i)
|
|
|
|
else:
|
|
|
|
return LONG_BINPUT + pack("<i", i)
|
1997-04-09 14:32:51 -03:00
|
|
|
|
|
|
|
return PUT + `i` + '\n'
|
|
|
|
|
2003-01-27 17:25:41 -04:00
|
|
|
# Return a GET (BINGET, LONG_BINGET) opcode string, with argument i.
|
2003-01-27 23:03:08 -04:00
|
|
|
def get(self, i, pack=struct.pack):
|
2001-04-09 23:48:53 -03:00
|
|
|
if self.bin:
|
|
|
|
if i < 256:
|
2003-01-27 23:03:08 -04:00
|
|
|
return BINGET + chr(i)
|
|
|
|
else:
|
|
|
|
return LONG_BINGET + pack("<i", i)
|
1997-04-09 14:32:51 -03:00
|
|
|
|
|
|
|
return GET + `i` + '\n'
|
2001-01-14 20:50:52 -04:00
|
|
|
|
2003-01-28 18:01:16 -04:00
|
|
|
def save(self, obj):
|
2003-01-28 12:34:19 -04:00
|
|
|
# Check for persistent id (defined by a subclass)
|
2003-01-28 11:10:22 -04:00
|
|
|
pid = self.persistent_id(obj)
|
2003-01-28 12:34:19 -04:00
|
|
|
if pid:
|
2002-11-13 18:01:27 -04:00
|
|
|
self.save_pers(pid)
|
|
|
|
return
|
1997-04-09 14:32:51 -03:00
|
|
|
|
2003-01-28 12:34:19 -04:00
|
|
|
# Check the memo
|
|
|
|
x = self.memo.get(id(obj))
|
|
|
|
if x:
|
|
|
|
self.write(self.get(x[0]))
|
1997-04-09 14:32:51 -03:00
|
|
|
return
|
|
|
|
|
2003-01-28 12:34:19 -04:00
|
|
|
# Check the type dispatch table
|
2003-01-28 11:10:22 -04:00
|
|
|
t = type(obj)
|
2003-01-28 12:34:19 -04:00
|
|
|
f = self.dispatch.get(t)
|
|
|
|
if f:
|
|
|
|
f(self, obj) # Call unbound method with explicit self
|
2003-01-27 20:48:09 -04:00
|
|
|
return
|
|
|
|
|
2003-01-28 12:34:19 -04:00
|
|
|
# Check for a class with a custom metaclass; treat as regular class
|
2003-01-27 20:48:09 -04:00
|
|
|
try:
|
|
|
|
issc = issubclass(t, TypeType)
|
2003-01-28 12:34:19 -04:00
|
|
|
except TypeError: # t is not a class (old Boost; see SF #502085)
|
2003-01-27 20:48:09 -04:00
|
|
|
issc = 0
|
|
|
|
if issc:
|
2003-01-28 11:10:22 -04:00
|
|
|
self.save_global(obj)
|
2003-01-27 20:48:09 -04:00
|
|
|
return
|
2001-12-19 12:55:02 -04:00
|
|
|
|
2003-01-28 12:34:19 -04:00
|
|
|
# Check copy_reg.dispatch_table
|
|
|
|
reduce = dispatch_table.get(t)
|
2003-01-28 18:01:16 -04:00
|
|
|
if not reduce:
|
|
|
|
# Check for a __reduce__ method.
|
|
|
|
# Subtle: get the unbound method from the class, so that
|
|
|
|
# protocol 2 can override the default __reduce__ that all
|
|
|
|
# classes inherit from object. This has the added
|
|
|
|
# advantage that the call always has the form reduce(obj)
|
|
|
|
reduce = getattr(t, "__reduce__", None)
|
|
|
|
if self.proto >= 2:
|
|
|
|
# Protocol 2 can do better than the default __reduce__
|
|
|
|
if reduce is object.__reduce__:
|
|
|
|
reduce = None
|
|
|
|
if not reduce:
|
2003-01-28 14:22:35 -04:00
|
|
|
self.save_newobj(obj)
|
|
|
|
return
|
2003-01-28 18:01:16 -04:00
|
|
|
if not reduce:
|
2003-01-28 12:34:19 -04:00
|
|
|
raise PicklingError("Can't pickle %r object: %r" %
|
|
|
|
(t.__name__, obj))
|
2003-01-28 18:01:16 -04:00
|
|
|
rv = reduce(obj)
|
2003-01-28 12:34:19 -04:00
|
|
|
|
|
|
|
# Check for string returned by reduce(), meaning "save as global"
|
|
|
|
if type(rv) is StringType:
|
|
|
|
self.save_global(obj, rv)
|
2003-01-27 20:48:09 -04:00
|
|
|
return
|
1997-04-09 14:32:51 -03:00
|
|
|
|
2003-01-28 12:34:19 -04:00
|
|
|
# Assert that reduce() returned a tuple
|
|
|
|
if type(rv) is not TupleType:
|
|
|
|
raise PicklingError("%s must return string or tuple" % reduce)
|
2001-01-14 20:50:52 -04:00
|
|
|
|
2003-01-31 14:53:21 -04:00
|
|
|
# Assert that it returned an appropriately sized tuple
|
2003-01-28 12:34:19 -04:00
|
|
|
l = len(rv)
|
2003-01-31 14:53:21 -04:00
|
|
|
if not (2 <= l <= 5):
|
2003-01-28 12:34:19 -04:00
|
|
|
raise PicklingError("Tuple returned by %s must have "
|
2003-01-31 14:53:21 -04:00
|
|
|
"two to five elements" % reduce)
|
1997-04-09 14:32:51 -03:00
|
|
|
|
2003-01-28 12:34:19 -04:00
|
|
|
# Save the reduce() output and finally memoize the object
|
2003-01-31 14:53:21 -04:00
|
|
|
self.save_reduce(obj=obj, *rv)
|
1997-04-09 14:32:51 -03:00
|
|
|
|
2003-01-28 11:10:22 -04:00
|
|
|
def persistent_id(self, obj):
|
2003-01-28 12:34:19 -04:00
|
|
|
# This exists so a subclass can override it
|
1997-04-09 14:32:51 -03:00
|
|
|
return None
|
|
|
|
|
|
|
|
def save_pers(self, pid):
|
2003-01-28 12:34:19 -04:00
|
|
|
# Save a persistent id reference
|
2003-01-27 21:03:10 -04:00
|
|
|
if self.bin:
|
2002-11-13 18:01:27 -04:00
|
|
|
self.save(pid)
|
1997-04-09 14:32:51 -03:00
|
|
|
self.write(BINPERSID)
|
2003-01-27 21:03:10 -04:00
|
|
|
else:
|
|
|
|
self.write(PERSID + str(pid) + '\n')
|
1997-04-09 14:32:51 -03:00
|
|
|
|
2003-01-31 14:53:21 -04:00
|
|
|
def save_reduce(self, func, args, state=None,
|
|
|
|
listitems=None, dictitems=None, obj=None):
|
2003-01-28 12:34:19 -04:00
|
|
|
# This API is be called by some subclasses
|
1997-04-09 14:32:51 -03:00
|
|
|
|
2003-01-28 12:34:19 -04:00
|
|
|
# Assert that args is a tuple or None
|
|
|
|
if not isinstance(args, TupleType):
|
|
|
|
if args is None:
|
|
|
|
# A hack for Jim Fulton's ExtensionClass, now deprecated.
|
|
|
|
# See load_reduce()
|
|
|
|
warnings.warn("__basicnew__ special case is deprecated",
|
|
|
|
DeprecationWarning)
|
|
|
|
else:
|
|
|
|
raise PicklingError(
|
|
|
|
"args from reduce() should be a tuple")
|
2003-01-27 16:16:36 -04:00
|
|
|
|
2003-01-28 12:34:19 -04:00
|
|
|
# Assert that func is callable
|
|
|
|
if not callable(func):
|
|
|
|
raise PicklingError("func from reduce should be callable")
|
|
|
|
|
|
|
|
save = self.save
|
|
|
|
write = self.write
|
|
|
|
|
2003-01-31 12:51:45 -04:00
|
|
|
# Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
|
|
|
|
if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
|
|
|
|
# A __reduce__ implementation can direct protocol 2 to
|
|
|
|
# use the more efficient NEWOBJ opcode, while still
|
|
|
|
# allowing protocol 0 and 1 to work normally. For this to
|
|
|
|
# work, the function returned by __reduce__ should be
|
|
|
|
# called __newobj__, and its first argument should be a
|
|
|
|
# new-style class. The implementation for __newobj__
|
|
|
|
# should be as follows, although pickle has no way to
|
|
|
|
# verify this:
|
|
|
|
#
|
|
|
|
# def __newobj__(cls, *args):
|
|
|
|
# return cls.__new__(cls, *args)
|
|
|
|
#
|
|
|
|
# Protocols 0 and 1 will pickle a reference to __newobj__,
|
|
|
|
# while protocol 2 (and above) will pickle a reference to
|
|
|
|
# cls, the remaining args tuple, and the NEWOBJ code,
|
|
|
|
# which calls cls.__new__(cls, *args) at unpickling time
|
|
|
|
# (see load_newobj below). If __reduce__ returns a
|
|
|
|
# three-tuple, the state from the third tuple item will be
|
|
|
|
# pickled regardless of the protocol, calling __setstate__
|
|
|
|
# at unpickling time (see load_build below).
|
|
|
|
#
|
|
|
|
# Note that no standard __newobj__ implementation exists;
|
|
|
|
# you have to provide your own. This is to enforce
|
|
|
|
# compatibility with Python 2.2 (pickles written using
|
|
|
|
# protocol 0 or 1 in Python 2.3 should be unpicklable by
|
|
|
|
# Python 2.2).
|
|
|
|
cls = args[0]
|
|
|
|
if not hasattr(cls, "__new__"):
|
|
|
|
raise PicklingError(
|
|
|
|
"args[0] from __newobj__ args has no __new__")
|
2003-01-31 13:17:49 -04:00
|
|
|
if obj is not None and cls is not obj.__class__:
|
|
|
|
raise PicklingError(
|
|
|
|
"args[0] from __newobj__ args has the wrong class")
|
2003-01-31 12:51:45 -04:00
|
|
|
args = args[1:]
|
|
|
|
save(cls)
|
|
|
|
save(args)
|
|
|
|
write(NEWOBJ)
|
|
|
|
else:
|
|
|
|
save(func)
|
|
|
|
save(args)
|
|
|
|
write(REDUCE)
|
2001-01-14 20:50:52 -04:00
|
|
|
|
2003-01-31 13:17:49 -04:00
|
|
|
if obj is not None:
|
|
|
|
self.memoize(obj)
|
|
|
|
|
2003-01-31 14:53:21 -04:00
|
|
|
# More new special cases (that work with older protocols as
|
|
|
|
# well): when __reduce__ returns a tuple with 4 or 5 items,
|
|
|
|
# the 4th and 5th item should be iterators that provide list
|
|
|
|
# items and dict items (as (key, value) tuples), or None.
|
|
|
|
|
|
|
|
if listitems is not None:
|
|
|
|
self._batch_appends(listitems)
|
|
|
|
|
|
|
|
if dictitems is not None:
|
|
|
|
self._batch_setitems(dictitems)
|
|
|
|
|
2001-04-09 23:48:53 -03:00
|
|
|
if state is not None:
|
1997-04-09 14:32:51 -03:00
|
|
|
save(state)
|
|
|
|
write(BUILD)
|
|
|
|
|
2003-01-28 14:22:35 -04:00
|
|
|
    def save_newobj(self, obj):
        # Save a new-style class instance, using protocol 2.
        assert self.proto >= 2 # This only works for protocol 2
        t = type(obj)
        getnewargs = getattr(obj, "__getnewargs__", None)
        if getnewargs:
            args = getnewargs() # This better not reference obj
        else:
            args = ()

        save = self.save
        write = self.write

        # Pickle the class, the __new__ args, then apply NEWOBJ.
        self.save(t)
        save(args)
        write(NEWOBJ)
        self.memoize(obj)

        # list/dict subclasses: stream their items with the batch helpers.
        if isinstance(obj, list):
            self._batch_appends(iter(obj))
        elif isinstance(obj, dict):
            self._batch_setitems(obj.iteritems())

        getstate = getattr(obj, "__getstate__", None)

        if getstate:
            # A class may define both __getstate__ and __getnewargs__.
            # If they are the same function, we ignore __getstate__.
            # This is for the benefit of protocols 0 and 1, which don't
            # use __getnewargs__.  Note that the only way to make them
            # the same function is something like this:
            #
            # class C(object):
            #     def __getstate__(self):
            #         return ...
            #     __getnewargs__ = __getstate__
            #
            # No tricks are needed to ignore __setstate__; it simply
            # won't be called when we don't generate BUILD.
            # Also note that when __getnewargs__ and __getstate__ are
            # the same function, we don't do the default thing of
            # looking for __dict__ and slots either -- it is assumed
            # that __getnewargs__ returns all the state there is
            # (which should be a safe assumption since __getstate__
            # returns the *same* state).
            if getstate == getnewargs:
                return

        # NOTE(review): this call is unconditional, so it appears to rely
        # on every new-style instance providing some __getstate__ (the
        # slots case raising the TypeError matched below) -- confirm
        # against the interpreter version this targets.
        try:
            state = getstate()
        except TypeError, err:
            # XXX Catch generic exception caused by __slots__
            if str(err) != ("a class that defines __slots__ "
                            "without defining __getstate__ "
                            "cannot be pickled"):
                print repr(str(err))
                raise # Not that specific exception
            getstate = None

        if not getstate:
            state = getattr(obj, "__dict__", None)
            if not state:
                state = None
            # If there are slots, the state becomes a tuple of two
            # items: the first item the regular __dict__ or None, and
            # the second a dict mapping slot names to slot values
            names = _slotnames(t)
            if names:
                slots = {}
                nil = []
                for name in names:
                    value = getattr(obj, name, nil)
                    if value is not nil:
                        slots[name] = value
                if slots:
                    state = (state, slots)

        if state is not None:
            save(state)
            write(BUILD)
|
2003-01-28 14:22:35 -04:00
|
|
|
|
2003-01-28 12:34:19 -04:00
|
|
|
    # Methods below this point are dispatched through the dispatch table

    # Maps a concrete type to the save_* method that pickles it.
    dispatch = {}

    def save_none(self, obj):
        self.write(NONE)
    dispatch[NoneType] = save_none
|
|
|
|
|
2003-01-28 11:10:22 -04:00
|
|
|
def save_bool(self, obj):
|
2003-01-28 00:25:27 -04:00
|
|
|
if self.proto >= 2:
|
2003-01-28 11:10:22 -04:00
|
|
|
self.write(obj and NEWTRUE or NEWFALSE)
|
2003-01-28 00:25:27 -04:00
|
|
|
else:
|
2003-01-28 11:10:22 -04:00
|
|
|
self.write(obj and TRUE or FALSE)
|
2002-04-03 18:41:51 -04:00
|
|
|
dispatch[bool] = save_bool
|
|
|
|
|
2003-01-28 11:10:22 -04:00
|
|
|
def save_int(self, obj, pack=struct.pack):
|
2001-04-09 23:48:53 -03:00
|
|
|
if self.bin:
|
2001-04-10 02:02:52 -03:00
|
|
|
# If the int is small enough to fit in a signed 4-byte 2's-comp
|
|
|
|
# format, we can store it more efficiently than the general
|
|
|
|
# case.
|
2003-01-27 23:03:08 -04:00
|
|
|
# First one- and two-byte unsigned ints:
|
2003-01-28 11:10:22 -04:00
|
|
|
if obj >= 0:
|
|
|
|
if obj <= 0xff:
|
|
|
|
self.write(BININT1 + chr(obj))
|
2003-01-27 23:03:08 -04:00
|
|
|
return
|
2003-01-28 11:10:22 -04:00
|
|
|
if obj <= 0xffff:
|
2003-01-29 16:14:23 -04:00
|
|
|
self.write("%c%c%c" % (BININT2, obj&0xff, obj>>8))
|
2003-01-27 23:03:08 -04:00
|
|
|
return
|
|
|
|
# Next check for 4-byte signed ints:
|
2003-01-28 11:10:22 -04:00
|
|
|
high_bits = obj >> 31 # note that Python shift sign-extends
|
2003-01-27 23:41:54 -04:00
|
|
|
if high_bits == 0 or high_bits == -1:
|
2001-04-10 02:02:52 -03:00
|
|
|
# All high bits are copies of bit 2**31, so the value
|
|
|
|
# fits in a 4-byte signed int.
|
2003-01-28 11:10:22 -04:00
|
|
|
self.write(BININT + pack("<i", obj))
|
1997-04-09 14:32:51 -03:00
|
|
|
return
|
2001-04-10 02:02:52 -03:00
|
|
|
# Text pickle, or int too big to fit in signed 4-byte format.
|
2003-01-28 11:10:22 -04:00
|
|
|
self.write(INT + `obj` + '\n')
|
1997-04-09 14:32:51 -03:00
|
|
|
dispatch[IntType] = save_int
|
|
|
|
|
2003-01-28 11:10:22 -04:00
|
|
|
def save_long(self, obj, pack=struct.pack):
|
2003-01-27 23:49:52 -04:00
|
|
|
if self.proto >= 2:
|
2003-01-28 11:10:22 -04:00
|
|
|
bytes = encode_long(obj)
|
2003-01-27 23:49:52 -04:00
|
|
|
n = len(bytes)
|
|
|
|
if n < 256:
|
|
|
|
self.write(LONG1 + chr(n) + bytes)
|
|
|
|
else:
|
|
|
|
self.write(LONG4 + pack("<i", n) + bytes)
|
cPickle.c: Full support for the new LONG1 and LONG4. Added comments.
Assorted code cleanups; e.g., sizeof(char) is 1 by definition, so there's
no need to do things like multiply by sizeof(char) in hairy malloc
arguments. Fixed an undetected-overflow bug in readline_file().
longobject.c: Fixed a really stupid bug in the new _PyLong_NumBits.
pickle.py: Fixed stupid bug in save_long(): When proto is 2, it
wrote LONG1 or LONG4, but forgot to return then -- it went on to
append the proto 1 LONG opcode too.
Fixed equally stupid cancelling bugs in load_long1() and
load_long4(): they *returned* the unpickled long instead of pushing
it on the stack. The return values were ignored. Tests passed
before only because save_long() pickled the long twice.
Fixed bugs in encode_long().
Noted that decode_long() is quadratic-time despite our hopes,
because long(string, 16) is still quadratic-time in len(string).
It's hex() that's linear-time. I don't know a way to make decode_long()
linear-time in Python, short of maybe transforming the 256's-complement
bytes into marshal's funky internal format, and letting marshal decode
that. It would be more valuable to make long(string, 16) linear time.
pickletester.py: Added a global "protocols" vector so tests can try
all the protocols in a sane way. Changed test_ints() and test_unicode()
to do so. Added a new test_long(), but the tail end of it is disabled
because it "takes forever" under pickle.py (but runs very quickly under
cPickle: cPickle proto 2 for longs is linear-time).
2003-02-01 22:57:53 -04:00
|
|
|
return
|
2003-01-28 11:10:22 -04:00
|
|
|
self.write(LONG + `obj` + '\n')
|
1997-04-09 14:32:51 -03:00
|
|
|
dispatch[LongType] = save_long
|
|
|
|
|
2003-01-28 11:10:22 -04:00
|
|
|
def save_float(self, obj, pack=struct.pack):
|
1998-10-22 17:15:36 -03:00
|
|
|
if self.bin:
|
2003-01-28 11:10:22 -04:00
|
|
|
self.write(BINFLOAT + pack('>d', obj))
|
1998-10-22 17:15:36 -03:00
|
|
|
else:
|
2003-01-28 11:10:22 -04:00
|
|
|
self.write(FLOAT + `obj` + '\n')
|
1997-04-09 14:32:51 -03:00
|
|
|
dispatch[FloatType] = save_float
|
|
|
|
|
2003-01-28 11:10:22 -04:00
|
|
|
def save_string(self, obj, pack=struct.pack):
|
2001-04-09 23:48:53 -03:00
|
|
|
if self.bin:
|
2003-01-28 11:10:22 -04:00
|
|
|
n = len(obj)
|
2003-01-27 17:15:36 -04:00
|
|
|
if n < 256:
|
2003-01-28 11:10:22 -04:00
|
|
|
self.write(SHORT_BINSTRING + chr(n) + obj)
|
1997-04-09 14:32:51 -03:00
|
|
|
else:
|
2003-01-28 11:10:22 -04:00
|
|
|
self.write(BINSTRING + pack("<i", n) + obj)
|
1997-04-09 14:32:51 -03:00
|
|
|
else:
|
2003-01-28 11:10:22 -04:00
|
|
|
self.write(STRING + `obj` + '\n')
|
|
|
|
self.memoize(obj)
|
1997-04-09 14:32:51 -03:00
|
|
|
dispatch[StringType] = save_string
|
|
|
|
|
2003-01-28 11:10:22 -04:00
|
|
|
def save_unicode(self, obj, pack=struct.pack):
|
2001-04-09 23:48:53 -03:00
|
|
|
if self.bin:
|
2003-01-28 11:10:22 -04:00
|
|
|
encoding = obj.encode('utf-8')
|
2003-01-27 17:15:36 -04:00
|
|
|
n = len(encoding)
|
2003-01-27 23:03:08 -04:00
|
|
|
self.write(BINUNICODE + pack("<i", n) + encoding)
|
2000-03-10 19:20:09 -04:00
|
|
|
else:
|
2003-01-28 11:10:22 -04:00
|
|
|
obj = obj.replace("\\", "\\u005c")
|
|
|
|
obj = obj.replace("\n", "\\u000a")
|
|
|
|
self.write(UNICODE + obj.encode('raw-unicode-escape') + '\n')
|
|
|
|
self.memoize(obj)
|
2000-03-10 19:20:09 -04:00
|
|
|
dispatch[UnicodeType] = save_unicode
|
|
|
|
|
2001-01-22 10:53:29 -04:00
|
|
|
if StringType == UnicodeType:
|
|
|
|
# This is true for Jython
|
2003-01-28 11:10:22 -04:00
|
|
|
def save_string(self, obj, pack=struct.pack):
|
|
|
|
unicode = obj.isunicode()
|
2001-01-22 10:53:29 -04:00
|
|
|
|
2001-04-09 23:48:53 -03:00
|
|
|
if self.bin:
|
2001-01-22 10:53:29 -04:00
|
|
|
if unicode:
|
2003-01-28 11:10:22 -04:00
|
|
|
obj = obj.encode("utf-8")
|
|
|
|
l = len(obj)
|
2001-04-09 23:48:53 -03:00
|
|
|
if l < 256 and not unicode:
|
2003-01-28 11:10:22 -04:00
|
|
|
self.write(SHORT_BINSTRING + chr(l) + obj)
|
2001-01-22 10:53:29 -04:00
|
|
|
else:
|
2003-01-27 23:03:08 -04:00
|
|
|
s = pack("<i", l)
|
2001-01-22 10:53:29 -04:00
|
|
|
if unicode:
|
2003-01-28 11:10:22 -04:00
|
|
|
self.write(BINUNICODE + s + obj)
|
2001-01-22 10:53:29 -04:00
|
|
|
else:
|
2003-01-28 11:10:22 -04:00
|
|
|
self.write(BINSTRING + s + obj)
|
2001-01-22 10:53:29 -04:00
|
|
|
else:
|
2001-02-09 16:06:00 -04:00
|
|
|
if unicode:
|
2003-01-28 11:10:22 -04:00
|
|
|
obj = obj.replace("\\", "\\u005c")
|
|
|
|
obj = obj.replace("\n", "\\u000a")
|
|
|
|
obj = obj.encode('raw-unicode-escape')
|
|
|
|
self.write(UNICODE + obj + '\n')
|
2001-01-22 10:53:29 -04:00
|
|
|
else:
|
2003-01-28 11:10:22 -04:00
|
|
|
self.write(STRING + `obj` + '\n')
|
|
|
|
self.memoize(obj)
|
2001-01-22 10:53:29 -04:00
|
|
|
dispatch[StringType] = save_string
|
2001-02-09 16:06:00 -04:00
|
|
|
|
2003-01-28 11:10:22 -04:00
|
|
|
    def save_tuple(self, obj):
        # Pickle a tuple.  The subtle part is tuples that contain
        # themselves (recursive tuples); see the big comment below.
        write = self.write
        proto = self.proto

        n = len(obj)
        if n == 0:
            if proto:
                write(EMPTY_TUPLE)
            else:   # proto 0 -- no EMPTY_TUPLE opcode
                write(MARK + TUPLE)
            return

        save = self.save
        memo = self.memo
        if n <= 3 and proto >= 2:
            # Protocol 2 has dedicated TUPLE1/TUPLE2/TUPLE3 opcodes,
            # which need no MARK object.
            for element in obj:
                save(element)
            # Subtle.  Same as in the big comment below.
            if id(obj) in memo:
                get = self.get(memo[id(obj)][0])
                write(POP * n + get)
            else:
                write(_tuplesize2code[n])
                self.memoize(obj)
            return

        # proto 0 or proto 1 and tuple isn't empty, or proto > 1 and tuple
        # has more than 3 elements.
        write(MARK)
        for element in obj:
            save(element)

        if id(obj) in memo:
            # Subtle.  d was not in memo when we entered save_tuple(), so
            # the process of saving the tuple's elements must have saved
            # the tuple itself:  the tuple is recursive.  The proper action
            # now is to throw away everything we put on the stack, and
            # simply GET the tuple (it's already constructed).  This check
            # could have been done in the "for element" loop instead, but
            # recursive tuples are a rare thing.
            get = self.get(memo[id(obj)][0])
            if proto:
                write(POP_MARK + get)
            else:   # proto 0 -- POP_MARK not available
                write(POP * (n+1) + get)
            return

        # No recursion.
        self.write(TUPLE)
        self.memoize(obj)

    dispatch[TupleType] = save_tuple
|
|
|
|
|
2003-01-28 12:58:41 -04:00
|
|
|
    # save_empty_tuple() isn't used by anything in Python 2.3.  However, I
    # found a Pickler subclass in Zope3 that calls it, so it's not harmless
    # to remove it.
    def save_empty_tuple(self, obj):
        # Kept only for backward compatibility with external subclasses.
        self.write(EMPTY_TUPLE)
|
|
|
|
|
2003-01-28 11:10:22 -04:00
|
|
|
def save_list(self, obj):
|
1997-04-09 14:32:51 -03:00
|
|
|
write = self.write
|
|
|
|
|
2001-04-09 23:48:53 -03:00
|
|
|
if self.bin:
|
1997-04-09 14:32:51 -03:00
|
|
|
write(EMPTY_LIST)
|
2003-01-31 14:53:21 -04:00
|
|
|
else: # proto 0 -- can't use EMPTY_LIST
|
|
|
|
write(MARK + LIST)
|
|
|
|
|
|
|
|
self.memoize(obj)
|
|
|
|
self._batch_appends(iter(obj))
|
|
|
|
|
|
|
|
dispatch[ListType] = save_list
|
|
|
|
|
|
|
|
    # Cap on the number of items emitted between successive
    # APPENDS/SETITEMS opcodes (see _batch_appends/_batch_setitems).
    _BATCHSIZE = 1000

    def _batch_appends(self, items):
        # Helper to batch up APPENDS sequences
        save = self.save
        write = self.write

        if not self.bin:
            # proto 0: no APPENDS opcode -- one APPEND per element.
            for x in items:
                save(x)
                write(APPEND)
            return

        r = xrange(self._BATCHSIZE)
        while items is not None:
            # Pull up to _BATCHSIZE elements from the iterator.
            tmp = []
            for i in r:
                try:
                    tmp.append(items.next())
                except StopIteration:
                    items = None
                    break
            n = len(tmp)
            if n > 1:
                # Worth a MARK ... APPENDS run.
                write(MARK)
                for x in tmp:
                    save(x)
                write(APPENDS)
            elif n:
                # A single element: plain APPEND is shorter.
                save(tmp[0])
                write(APPEND)
            # else tmp is empty, and we're done
|
1997-04-09 14:32:51 -03:00
|
|
|
|
2003-01-28 11:10:22 -04:00
|
|
|
def save_dict(self, obj):
|
1997-04-09 14:32:51 -03:00
|
|
|
write = self.write
|
|
|
|
|
2001-04-09 23:48:53 -03:00
|
|
|
if self.bin:
|
1997-04-09 14:32:51 -03:00
|
|
|
write(EMPTY_DICT)
|
2003-01-31 14:53:21 -04:00
|
|
|
else: # proto 0 -- can't use EMPTY_DICT
|
2003-01-27 21:34:43 -04:00
|
|
|
write(MARK + DICT)
|
1997-04-09 14:32:51 -03:00
|
|
|
|
2003-01-31 14:53:21 -04:00
|
|
|
self.memoize(obj)
|
|
|
|
self._batch_setitems(obj.iteritems())
|
1997-04-09 14:32:51 -03:00
|
|
|
|
|
|
|
dispatch[DictionaryType] = save_dict
|
1998-05-27 19:38:22 -03:00
|
|
|
if not PyStringMap is None:
|
|
|
|
dispatch[PyStringMap] = save_dict
|
1997-04-09 14:32:51 -03:00
|
|
|
|
2003-01-31 14:53:21 -04:00
|
|
|
    def _batch_setitems(self, items):
        # Helper to batch up SETITEMS sequences; proto >= 1 only
        save = self.save
        write = self.write

        if not self.bin:
            # proto 0: no SETITEMS opcode -- one SETITEM per pair.
            for k, v in items:
                save(k)
                save(v)
                write(SETITEM)
            return

        r = xrange(self._BATCHSIZE)
        while items is not None:
            # Pull up to _BATCHSIZE (key, value) pairs from the iterator.
            tmp = []
            for i in r:
                try:
                    tmp.append(items.next())
                except StopIteration:
                    items = None
                    break
            n = len(tmp)
            if n > 1:
                # Worth a MARK ... SETITEMS run.
                write(MARK)
                for k, v in tmp:
                    save(k)
                    save(v)
                write(SETITEMS)
            elif n:
                # A single pair: plain SETITEM is shorter.
                k, v = tmp[0]
                save(k)
                save(v)
                write(SETITEM)
            # else tmp is empty, and we're done
|
|
|
|
|
2003-01-28 11:10:22 -04:00
|
|
|
    def save_inst(self, obj):
        # Pickle an old-style (classic) class instance, with the OBJ
        # opcode (binary protocols) or INST (protocol 0).
        cls = obj.__class__

        memo = self.memo
        write = self.write
        save = self.save

        # __getinitargs__, if defined, supplies the constructor arguments.
        if hasattr(obj, '__getinitargs__'):
            args = obj.__getinitargs__()
            len(args) # XXX Assert it's a sequence
            _keep_alive(args, memo)
        else:
            args = ()

        write(MARK)

        if self.bin:
            # OBJ takes the class object itself from the stack.
            save(cls)
            for arg in args:
                save(arg)
            write(OBJ)
        else:
            # INST names the class as "module\nname" text after the opcode.
            for arg in args:
                save(arg)
            write(INST + cls.__module__ + '\n' + cls.__name__ + '\n')

        self.memoize(obj)

        # Instance state: the result of __getstate__() if defined,
        # else the instance __dict__; applied at load time by BUILD.
        try:
            getstate = obj.__getstate__
        except AttributeError:
            stuff = obj.__dict__
        else:
            stuff = getstate()
            _keep_alive(stuff, memo)
        save(stuff)
        write(BUILD)

    dispatch[InstanceType] = save_inst
|
|
|
|
|
2003-01-29 02:14:11 -04:00
|
|
|
    def save_global(self, obj, name=None, pack=struct.pack):
        # Pickle a class/function by reference (module.name), or, under
        # protocol 2, by its registered extension code when one exists.
        write = self.write
        memo = self.memo

        if name is None:
            name = obj.__name__

        module = getattr(obj, "__module__", None)
        if module is None:
            module = whichmodule(obj, name)

        # Verify that module.name really resolves back to obj, so the
        # unpickler will be able to find it again.
        try:
            __import__(module)
            mod = sys.modules[module]
            klass = getattr(mod, name)
        except (ImportError, KeyError, AttributeError):
            raise PicklingError(
                "Can't pickle %r: it's not found as %s.%s" %
                (obj, module, name))
        else:
            if klass is not obj:
                raise PicklingError(
                    "Can't pickle %r: it's not the same object as %s.%s" %
                    (obj, module, name))

        if self.proto >= 2:
            # Prefer the compact EXT1/EXT2/EXT4 opcodes when (module, name)
            # has a registered extension code.
            code = _extension_registry.get((module, name))
            if code:
                assert code > 0
                if code <= 0xff:
                    write(EXT1 + chr(code))
                elif code <= 0xffff:
                    # 2-byte little-endian code.
                    write("%c%c%c" % (EXT2, code&0xff, code>>8))
                else:
                    write(EXT4 + pack("<i", code))
                return

        write(GLOBAL + module + '\n' + name + '\n')
        self.memoize(obj)

    dispatch[ClassType] = save_global
    dispatch[FunctionType] = save_global
    dispatch[BuiltinFunctionType] = save_global
    dispatch[TypeType] = save_global
|
1995-03-14 11:09:05 -04:00
|
|
|
|
2003-01-28 11:19:53 -04:00
|
|
|
# Pickling helpers
|
1995-01-09 20:31:14 -04:00
|
|
|
|
2003-01-28 18:01:16 -04:00
|
|
|
def _slotnames(cls):
|
|
|
|
"""Return a list of slot names for a given class.
|
|
|
|
|
|
|
|
This needs to find slots defined by the class and its bases, so we
|
|
|
|
can't simply return the __slots__ attribute. We must walk down
|
|
|
|
the Method Resolution Order and concatenate the __slots__ of each
|
|
|
|
class found there. (This assumes classes don't modify their
|
|
|
|
__slots__ attribute to misrepresent their slots after the class is
|
|
|
|
defined.)
|
|
|
|
"""
|
2003-02-03 15:46:54 -04:00
|
|
|
|
|
|
|
# Get the value from a cache in the class if possible
|
|
|
|
names = cls.__dict__.get("__slotnames__")
|
|
|
|
if names is not None:
|
|
|
|
return names
|
|
|
|
|
|
|
|
# Not cached -- calculate the value
|
2003-01-28 18:01:16 -04:00
|
|
|
names = []
|
2003-02-03 15:46:54 -04:00
|
|
|
if not hasattr(cls, "__slots__"):
|
|
|
|
# This class has no slots
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
# Slots found -- gather slot names from all base classes
|
|
|
|
for c in cls.__mro__:
|
|
|
|
if "__slots__" in c.__dict__:
|
|
|
|
names += [name for name in c.__dict__["__slots__"]
|
|
|
|
if name not in ("__dict__", "__weakref__")]
|
|
|
|
|
|
|
|
# Cache the outcome in the class if at all possible
|
|
|
|
try:
|
|
|
|
cls.__slotnames__ = names
|
|
|
|
except:
|
|
|
|
pass # But don't die if we can't
|
|
|
|
|
2003-01-28 18:01:16 -04:00
|
|
|
return names
|
|
|
|
|
1997-09-02 21:23:54 -03:00
|
|
|
def _keep_alive(x, memo):
|
|
|
|
"""Keeps a reference to the object x in the memo.
|
|
|
|
|
|
|
|
Because we remember objects by their id, we have
|
|
|
|
to assure that possibly temporary objects are kept
|
|
|
|
alive by referencing them.
|
|
|
|
We store a reference at the id of the memo, which should
|
|
|
|
normally not be used unless someone tries to deepcopy
|
|
|
|
the memo itself...
|
|
|
|
"""
|
|
|
|
try:
|
1998-03-26 17:13:24 -04:00
|
|
|
memo[id(memo)].append(x)
|
1997-09-02 21:23:54 -03:00
|
|
|
except KeyError:
|
1998-03-26 17:13:24 -04:00
|
|
|
# aha, this is the first one :-)
|
|
|
|
memo[id(memo)]=[x]
|
1997-09-02 21:23:54 -03:00
|
|
|
|
|
|
|
|
2003-01-28 20:56:17 -04:00
|
|
|
# A cache for whichmodule(), mapping a function object to the name of
|
|
|
|
# the module in which the function was found.
|
|
|
|
|
2002-09-19 20:00:12 -03:00
|
|
|
classmap = {} # called classmap for backwards compatibility

def whichmodule(func, funcname):
    """Figure out the module in which a function occurs.

    Search sys.modules for the module.
    Cache in classmap.
    Return a module name.
    If the function cannot be found, return "__main__".
    """
    # Python functions should always get an __module__ from their globals.
    mod = getattr(func, "__module__", None)
    if mod is not None:
        return mod

    # Answered (and cached) by an earlier search?  Cached values are
    # always non-empty module-name strings, so None means "not cached".
    cached = classmap.get(func)
    if cached is not None:
        return cached

    # Brute-force search: find a module with an attribute named funcname
    # that is this very object.  Dummy (None) entries and __main__ are
    # skipped.
    found = '__main__'
    for modname, module in sys.modules.items():
        if module is None:
            continue # skip dummy package entries
        if modname != '__main__' and \
           getattr(module, funcname, None) is func:
            found = modname
            break

    classmap[func] = found
    return found
|
1995-01-09 20:31:14 -04:00
|
|
|
|
|
|
|
|
2003-01-28 11:19:53 -04:00
|
|
|
# Unpickling machinery
|
|
|
|
|
1997-04-09 14:32:51 -03:00
|
|
|
class Unpickler:

    def __init__(self, file):
        """This takes a file-like object for reading a pickle data stream.

        The protocol version of the pickle is detected automatically, so no
        proto argument is needed.

        The file-like object must have two methods, a read() method that
        takes an integer argument, and a readline() method that requires no
        arguments.  Both methods should return a string.  Thus file-like
        object can be a file object opened for reading, a StringIO object,
        or any other custom object that meets this interface.
        """
        # Bind the file's methods directly; read/readline are hot paths.
        self.readline = file.readline
        self.read = file.read
        # Maps memo keys (strings; see load_put/load_binput) to objects
        # already unpickled.
        self.memo = {}
|
|
|
|
|
|
|
|
    def load(self):
        """Read a pickled object representation from the open file.

        Return the reconstituted object hierarchy specified in the file.
        """
        self.mark = object() # any new unique object
        self.stack = []
        self.append = self.stack.append
        read = self.read
        dispatch = self.dispatch
        try:
            # Opcode loop: read one opcode byte and dispatch on it, until
            # the STOP opcode handler raises _Stop carrying the result.
            while 1:
                key = read(1)
                dispatch[key](self)
        except _Stop, stopinst:
            return stopinst.value
|
1997-04-09 14:32:51 -03:00
|
|
|
|
2003-01-27 21:41:51 -04:00
|
|
|
    # Return largest index k such that self.stack[k] is self.mark.
    # If the stack doesn't contain a mark, eventually raises IndexError.
    # This could be sped by maintaining another stack, of indices at which
    # the mark appears.  For that matter, the latter stack would suffice,
    # and we wouldn't need to push mark objects on self.stack at all.
    # Doing so is probably a good thing, though, since if the pickle is
    # corrupt (or hostile) we may get a clue from finding self.mark embedded
    # in unpickled objects.
    def marker(self):
        stack = self.stack
        mark = self.mark
        k = len(stack)-1
        # Scan downward from the top for the most recent mark object.
        while stack[k] is not mark: k = k-1
        return k

    # Maps an opcode byte to the bound method that handles it.
    dispatch = {}

    def load_eof(self):
        # read(1) returned '' -- the underlying file is exhausted.
        raise EOFError
    dispatch[''] = load_eof
|
|
|
|
|
2003-01-27 23:49:52 -04:00
|
|
|
    def load_proto(self):
        # PROTO opcode: one byte giving the pickle protocol version.
        proto = ord(self.read(1))
        if not 0 <= proto <= 2:
            # This implementation understands protocols 0 through 2 only.
            raise ValueError, "unsupported pickle protocol: %d" % proto
    dispatch[PROTO] = load_proto
|
|
|
|
|
1997-04-09 14:32:51 -03:00
|
|
|
    def load_persid(self):
        # PERSID: the persistent id is one line of text after the opcode.
        # persistent_load is expected to be supplied by a subclass.
        pid = self.readline()[:-1]
        self.append(self.persistent_load(pid))
    dispatch[PERSID] = load_persid

    def load_binpersid(self):
        # BINPERSID: the persistent id is the object on top of the stack.
        pid = self.stack.pop()
        self.append(self.persistent_load(pid))
    dispatch[BINPERSID] = load_binpersid
|
|
|
|
|
|
|
|
    def load_none(self):
        self.append(None)
    dispatch[NONE] = load_none

    def load_false(self):
        # Protocol 2 NEWFALSE opcode.
        self.append(False)
    dispatch[NEWFALSE] = load_false

    def load_true(self):
        # Protocol 2 NEWTRUE opcode.
        self.append(True)
    dispatch[NEWTRUE] = load_true
|
|
|
|
|
1997-04-09 14:32:51 -03:00
|
|
|
    def load_int(self):
        # INT: one line of text.  The "00" and "01" lines are the
        # protocol-0 spellings of False and True (compare against
        # FALSE/TRUE with the leading opcode byte stripped).
        data = self.readline()
        if data == FALSE[1:]:
            val = False
        elif data == TRUE[1:]:
            val = True
        else:
            try:
                val = int(data)
            except ValueError:
                # int() rejected the text; let long() have a try.
                val = long(data)
        self.append(val)
    dispatch[INT] = load_int
|
|
|
|
|
|
|
|
    def load_binint(self):
        # BININT: 4-byte little-endian signed int, decoded via marshal.
        self.append(mloads('i' + self.read(4)))
    dispatch[BININT] = load_binint

    def load_binint1(self):
        # BININT1: 1-byte unsigned int.
        self.append(ord(self.read(1)))
    dispatch[BININT1] = load_binint1

    def load_binint2(self):
        # BININT2: 2-byte little-endian unsigned int (zero-padded to the
        # 4 bytes marshal expects).
        self.append(mloads('i' + self.read(2) + '\000\000'))
    dispatch[BININT2] = load_binint2
|
2001-01-14 20:50:52 -04:00
|
|
|
|
1997-04-09 14:32:51 -03:00
|
|
|
    def load_long(self):
        # LONG: one line of text; base 0 lets long() accept 0x/octal
        # prefixes as well as plain decimal.
        self.append(long(self.readline()[:-1], 0))
    dispatch[LONG] = load_long
|
|
|
|
|
2003-01-27 23:49:52 -04:00
|
|
|
    def load_long1(self):
        # LONG1 (proto 2): 1-byte payload length n, then n bytes of
        # little-endian two's-complement data (see decode_long).
        n = ord(self.read(1))
        bytes = self.read(n)
        self.append(decode_long(bytes))
    dispatch[LONG1] = load_long1

    def load_long4(self):
        # LONG4 (proto 2): 4-byte little-endian payload length, then the
        # payload bytes (same encoding as LONG1).
        n = mloads('i' + self.read(4))
        bytes = self.read(n)
        self.append(decode_long(bytes))
    dispatch[LONG4] = load_long4
|
|
|
|
|
1997-04-09 14:32:51 -03:00
|
|
|
    def load_float(self):
        # FLOAT: one line of text, parsed by float().
        self.append(float(self.readline()[:-1]))
    dispatch[FLOAT] = load_float

    def load_binfloat(self, unpack=struct.unpack):
        # BINFLOAT: 8-byte big-endian IEEE double.
        self.append(unpack('>d', self.read(8))[0])
    dispatch[BINFLOAT] = load_binfloat
|
|
|
|
|
1997-04-09 14:32:51 -03:00
|
|
|
    def load_string(self):
        # STRING: one line holding a quoted, escaped string.  Demand
        # matching surrounding quotes instead of eval()ing the line --
        # the pickle may be corrupt or hostile.
        rep = self.readline()[:-1]
        for q in "\"'": # double or single quote
            if rep.startswith(q):
                if not rep.endswith(q):
                    raise ValueError, "insecure string pickle"
                # Strip the surrounding quotes.
                rep = rep[len(q):-len(q)]
                break
        else:
            raise ValueError, "insecure string pickle"
        self.append(rep.decode("string-escape"))
    dispatch[STRING] = load_string
|
|
|
|
|
|
|
|
def load_binstring(self):
|
|
|
|
len = mloads('i' + self.read(4))
|
|
|
|
self.append(self.read(len))
|
|
|
|
dispatch[BINSTRING] = load_binstring
|
|
|
|
|
2000-03-10 19:20:09 -04:00
|
|
|
def load_unicode(self):
|
|
|
|
self.append(unicode(self.readline()[:-1],'raw-unicode-escape'))
|
|
|
|
dispatch[UNICODE] = load_unicode
|
|
|
|
|
|
|
|
def load_binunicode(self):
|
|
|
|
len = mloads('i' + self.read(4))
|
|
|
|
self.append(unicode(self.read(len),'utf-8'))
|
|
|
|
dispatch[BINUNICODE] = load_binunicode
|
|
|
|
|
1997-04-09 14:32:51 -03:00
|
|
|
def load_short_binstring(self):
|
2003-01-27 17:15:36 -04:00
|
|
|
len = ord(self.read(1))
|
1997-04-09 14:32:51 -03:00
|
|
|
self.append(self.read(len))
|
|
|
|
dispatch[SHORT_BINSTRING] = load_short_binstring
|
|
|
|
|
|
|
|
    def load_tuple(self):
        # TUPLE: everything above the topmost mark becomes one tuple.
        k = self.marker()
        self.stack[k:] = [tuple(self.stack[k+1:])]
    dispatch[TUPLE] = load_tuple

    def load_empty_tuple(self):
        self.stack.append(())
    dispatch[EMPTY_TUPLE] = load_empty_tuple

    # TUPLE1/TUPLE2/TUPLE3 (proto 2): build a tuple from the top 1, 2 or
    # 3 stack items; no mark object is needed.
    def load_tuple1(self):
        self.stack[-1] = (self.stack[-1],)
    dispatch[TUPLE1] = load_tuple1

    def load_tuple2(self):
        self.stack[-2:] = [(self.stack[-2], self.stack[-1])]
    dispatch[TUPLE2] = load_tuple2

    def load_tuple3(self):
        self.stack[-3:] = [(self.stack[-3], self.stack[-2], self.stack[-1])]
    dispatch[TUPLE3] = load_tuple3
|
|
|
|
|
1997-04-09 14:32:51 -03:00
|
|
|
    def load_empty_list(self):
        # EMPTY_LIST: push a fresh empty list.
        self.stack.append([])
    dispatch[EMPTY_LIST] = load_empty_list

    def load_empty_dictionary(self):
        # EMPTY_DICT: push a fresh empty dict.
        self.stack.append({})
    dispatch[EMPTY_DICT] = load_empty_dictionary
|
|
|
|
|
|
|
|
def load_list(self):
|
|
|
|
k = self.marker()
|
|
|
|
self.stack[k:] = [self.stack[k+1:]]
|
|
|
|
dispatch[LIST] = load_list
|
|
|
|
|
|
|
|
def load_dict(self):
|
|
|
|
k = self.marker()
|
|
|
|
d = {}
|
|
|
|
items = self.stack[k+1:]
|
|
|
|
for i in range(0, len(items), 2):
|
|
|
|
key = items[i]
|
|
|
|
value = items[i+1]
|
|
|
|
d[key] = value
|
|
|
|
self.stack[k:] = [d]
|
|
|
|
dispatch[DICT] = load_dict
|
|
|
|
|
2003-01-30 11:41:46 -04:00
|
|
|
    # INST and OBJ differ only in how they get a class object.  It's not
    # only sensible to do the rest in a common routine, the two routines
    # previously diverged and grew different bugs.
    # klass is the class to instantiate, and k points to the topmost mark
    # object, following which are the arguments for klass.__init__.
    def _instantiate(self, klass, k):
        args = tuple(self.stack[k+1:])
        del self.stack[k:]
        instantiated = 0
        # For an argument-less classic class without __getinitargs__,
        # skip __init__ entirely: make a featureless instance and graft
        # the real class onto it.
        if (not args and
                type(klass) is ClassType and
                not hasattr(klass, "__getinitargs__")):
            try:
                value = _EmptyClass()
                value.__class__ = klass
                instantiated = 1
            except RuntimeError:
                # In restricted execution, assignment to inst.__class__ is
                # prohibited
                pass
        if not instantiated:
            try:
                value = klass(*args)
            except TypeError, err:
                # Re-raise with the class name included, preserving the
                # original traceback.
                raise TypeError, "in constructor for %s: %s" % (
                    klass.__name__, str(err)), sys.exc_info()[2]
        self.append(value)
|
2003-01-30 11:41:46 -04:00
|
|
|
|
|
|
|
    def load_inst(self):
        # INST: "module\nname\n" follow the opcode; the constructor
        # arguments sit on the stack above the topmost mark.
        module = self.readline()[:-1]
        name = self.readline()[:-1]
        klass = self.find_class(module, name)
        self._instantiate(klass, self.marker())
    dispatch[INST] = load_inst
|
|
|
|
|
|
|
|
    def load_obj(self):
        # OBJ: like INST, but the class object itself is on the stack,
        # just above the mark.
        # Stack is ... markobject classobject arg1 arg2 ...
        k = self.marker()
        klass = self.stack.pop(k+1)
        self._instantiate(klass, k)
    dispatch[OBJ] = load_obj
|
1997-04-09 14:32:51 -03:00
|
|
|
|
2003-01-28 11:10:22 -04:00
|
|
|
    def load_newobj(self):
        # NEWOBJ (proto 2): the stack holds cls then an argument tuple;
        # build the instance via cls.__new__(cls, *args), bypassing
        # __init__.
        args = self.stack.pop()
        cls = self.stack[-1]
        obj = cls.__new__(cls, *args)
        self.stack[-1] = obj
    dispatch[NEWOBJ] = load_newobj
|
|
|
|
|
1997-04-09 14:32:51 -03:00
|
|
|
    def load_global(self):
        # GLOBAL: "module\nname\n" -- push the named module attribute
        # (typically a class or function).
        module = self.readline()[:-1]
        name = self.readline()[:-1]
        klass = self.find_class(module, name)
        self.append(klass)
    dispatch[GLOBAL] = load_global
|
|
|
|
|
2003-01-29 02:14:11 -04:00
|
|
|
    def load_ext1(self):
        # EXT1 (proto 2): 1-byte extension registry code.
        code = ord(self.read(1))
        self.get_extension(code)
    dispatch[EXT1] = load_ext1

    def load_ext2(self):
        # EXT2: 2-byte little-endian extension code (zero-padded for
        # marshal).
        code = mloads('i' + self.read(2) + '\000\000')
        self.get_extension(code)
    dispatch[EXT2] = load_ext2

    def load_ext4(self):
        # EXT4: 4-byte little-endian extension code.
        code = mloads('i' + self.read(4))
        self.get_extension(code)
    dispatch[EXT4] = load_ext4
|
|
|
|
|
|
|
|
    def get_extension(self, code):
        # Push the object registered under this extension code, resolving
        # it via the copy_reg registries and caching it on first use.
        nil = []
        # nil is a unique sentinel -- any real value (even None) could be
        # a legitimate cache entry.
        obj = _extension_cache.get(code, nil)
        if obj is not nil:
            self.append(obj)
            return
        key = _inverted_registry.get(code)
        if not key:
            raise ValueError("unregistered extension code %d" % code)
        obj = self.find_class(*key)
        _extension_cache[code] = obj
        self.append(obj)
|
|
|
|
|
1997-04-09 14:32:51 -03:00
|
|
|
    def find_class(self, module, name):
        # Resolve module.name to an object, importing the module if
        # necessary.
        # Subclasses may override this
        __import__(module)
        mod = sys.modules[module]
        klass = getattr(mod, name)
        return klass
|
|
|
|
|
|
|
|
    def load_reduce(self):
        # REDUCE: the stack holds a callable and an argument tuple;
        # replace both with the result of the call.
        stack = self.stack
        args = stack.pop()
        func = stack[-1]
        if args is None:
            # A hack for Jim Fulton's ExtensionClass, now deprecated
            warnings.warn("__basicnew__ special case is deprecated",
                          DeprecationWarning)
            value = func.__basicnew__()
        else:
            value = func(*args)
        stack[-1] = value
    dispatch[REDUCE] = load_reduce
|
|
|
|
|
|
|
|
    def load_pop(self):
        # POP: discard the top stack item.
        del self.stack[-1]
    dispatch[POP] = load_pop

    def load_pop_mark(self):
        # POP_MARK: discard everything down to and including the topmost
        # mark object.
        k = self.marker()
        del self.stack[k:]
    dispatch[POP_MARK] = load_pop_mark
|
|
|
|
|
|
|
|
    def load_dup(self):
        # DUP: push another reference to the top stack item.
        self.append(self.stack[-1])
    dispatch[DUP] = load_dup
|
|
|
|
|
|
|
|
    def load_get(self):
        # GET: the memo key is one line of text.
        self.append(self.memo[self.readline()[:-1]])
    dispatch[GET] = load_get

    def load_binget(self):
        # BINGET: 1-byte memo index.  Memo keys are stored as the repr()
        # of the int, matching load_binput.
        i = ord(self.read(1))
        self.append(self.memo[`i`])
    dispatch[BINGET] = load_binget

    def load_long_binget(self):
        # LONG_BINGET: 4-byte little-endian memo index.
        i = mloads('i' + self.read(4))
        self.append(self.memo[`i`])
    dispatch[LONG_BINGET] = load_long_binget
|
|
|
|
|
|
|
|
    def load_put(self):
        # PUT: the memo key is one line of text; the value is the top
        # stack item (which stays on the stack).
        self.memo[self.readline()[:-1]] = self.stack[-1]
    dispatch[PUT] = load_put

    def load_binput(self):
        # BINPUT: 1-byte memo index, stored under the repr() of the int
        # so text and binary opcodes share one memo namespace.
        i = ord(self.read(1))
        self.memo[`i`] = self.stack[-1]
    dispatch[BINPUT] = load_binput

    def load_long_binput(self):
        # LONG_BINPUT: 4-byte little-endian memo index.
        i = mloads('i' + self.read(4))
        self.memo[`i`] = self.stack[-1]
    dispatch[LONG_BINPUT] = load_long_binput
|
|
|
|
|
|
|
|
def load_append(self):
|
|
|
|
stack = self.stack
|
2002-06-30 00:39:14 -03:00
|
|
|
value = stack.pop()
|
1997-04-09 14:32:51 -03:00
|
|
|
list = stack[-1]
|
|
|
|
list.append(value)
|
|
|
|
dispatch[APPEND] = load_append
|
|
|
|
|
|
|
|
def load_appends(self):
|
|
|
|
stack = self.stack
|
|
|
|
mark = self.marker()
|
|
|
|
list = stack[mark - 1]
|
2003-01-27 21:44:45 -04:00
|
|
|
list.extend(stack[mark + 1:])
|
1997-04-09 14:32:51 -03:00
|
|
|
del stack[mark:]
|
|
|
|
dispatch[APPENDS] = load_appends
|
2001-01-14 20:50:52 -04:00
|
|
|
|
1997-04-09 14:32:51 -03:00
|
|
|
def load_setitem(self):
|
|
|
|
stack = self.stack
|
2002-06-30 00:39:14 -03:00
|
|
|
value = stack.pop()
|
|
|
|
key = stack.pop()
|
1997-04-09 14:32:51 -03:00
|
|
|
dict = stack[-1]
|
|
|
|
dict[key] = value
|
|
|
|
dispatch[SETITEM] = load_setitem
|
|
|
|
|
|
|
|
def load_setitems(self):
|
|
|
|
stack = self.stack
|
|
|
|
mark = self.marker()
|
|
|
|
dict = stack[mark - 1]
|
1998-03-26 17:13:24 -04:00
|
|
|
for i in range(mark + 1, len(stack), 2):
|
1997-04-09 14:32:51 -03:00
|
|
|
dict[stack[i]] = stack[i + 1]
|
|
|
|
|
|
|
|
del stack[mark:]
|
|
|
|
dispatch[SETITEMS] = load_setitems
|
|
|
|
|
|
|
|
    def load_build(self):
        # BUILD: apply the pickled state (top of stack) to the object just
        # beneath it.  The object stays on the stack.
        stack = self.stack
        state = stack.pop()
        inst = stack[-1]
        # An object's own __setstate__ hook, when present, takes full
        # responsibility for restoring state.
        setstate = getattr(inst, "__setstate__", None)
        if setstate:
            setstate(state)
            return
        # Protocol 2 may deliver state as a (dict_state, slot_state) pair;
        # split it so __slots__ attributes can be restored via setattr.
        slotstate = None
        if isinstance(state, tuple) and len(state) == 2:
            state, slotstate = state
        if state:
            try:
                # Fast path: bulk-update the instance dict.
                inst.__dict__.update(state)
            except RuntimeError:
                # XXX In restricted execution, the instance's __dict__
                # is not accessible.  Use the old way of unpickling
                # the instance variables.  This is a semantic
                # difference when unpickling in restricted
                # vs. unrestricted modes.
                for k, v in state.items():
                    setattr(inst, k, v)
        # Slot attributes cannot go through __dict__; set them one by one.
        if slotstate:
            for k, v in slotstate.items():
                setattr(inst, k, v)
    dispatch[BUILD] = load_build
|
|
|
|
|
|
|
|
    def load_mark(self):
        # MARK: push the sentinel object delimiting the start of a
        # variable-length run (consumed later by APPENDS, SETITEMS, etc.).
        self.append(self.mark)
    dispatch[MARK] = load_mark
|
|
|
|
|
|
|
|
def load_stop(self):
|
2002-06-30 00:39:14 -03:00
|
|
|
value = self.stack.pop()
|
2000-12-13 14:11:56 -04:00
|
|
|
raise _Stop(value)
|
1997-04-09 14:32:51 -03:00
|
|
|
dispatch[STOP] = load_stop
|
1995-01-09 20:31:14 -04:00
|
|
|
|
1997-12-05 15:42:42 -04:00
|
|
|
# Helper class for load_inst/load_obj
|
|
|
|
|
|
|
|
class _EmptyClass:
    # Featureless placeholder used by the instance-loading opcodes (per the
    # "Helper class for load_inst/load_obj" note above): an instance is
    # created from this class and its __class__ is then replaced, avoiding
    # a call to the real class's __init__.
    pass
|
1995-01-09 20:31:14 -04:00
|
|
|
|
2003-01-30 23:43:58 -04:00
|
|
|
# Encode/decode longs in linear time.
|
|
|
|
|
|
|
|
import binascii as _binascii
|
2003-01-27 23:49:52 -04:00
|
|
|
|
|
|
|
def encode_long(x):
    r"""Encode a long to a two's complement little-endian binary string.
    Note that 0L is a special case, returning an empty string, to save a
    byte in the LONG1 pickling context.

    >>> encode_long(0L)
    ''
    >>> encode_long(255L)
    '\xff\x00'
    >>> encode_long(32767L)
    '\xff\x7f'
    >>> encode_long(-256L)
    '\x00\xff'
    >>> encode_long(-32768L)
    '\x00\x80'
    >>> encode_long(-128L)
    '\x80'
    >>> encode_long(127L)
    '\x7f'
    >>>
    """

    # Strategy: go through hex() + unhexlify(), because hex() of a long is
    # linear-time in the number of bits, unlike a repeated-divmod loop.
    if x == 0:
        # 0 gets the empty encoding so LONG1 can pickle it in one length byte.
        return ''
    if x > 0:
        ashex = hex(x)
        assert ashex.startswith("0x")
        # hex() output is "0x" + digits, possibly with a trailing "L";
        # njunkchars counts the characters that are not hex digits.
        njunkchars = 2 + ashex.endswith('L')
        nibbles = len(ashex) - njunkchars
        if nibbles & 1:
            # need an even # of nibbles for unhexlify
            ashex = "0x0" + ashex[2:]
        elif int(ashex[2], 16) >= 8:
            # "looks negative", so need a byte of sign bits
            ashex = "0x00" + ashex[2:]
    else:
        # Build the 256's-complement:  (1L << nbytes) + x.  The trick is
        # to find the number of bytes in linear time (although that should
        # really be a constant-time task).
        ashex = hex(-x)
        assert ashex.startswith("0x")
        njunkchars = 2 + ashex.endswith('L')
        nibbles = len(ashex) - njunkchars
        if nibbles & 1:
            # Extend to a full byte.
            nibbles += 1
        nbits = nibbles * 4
        x += 1L << nbits
        assert x > 0
        ashex = hex(x)
        njunkchars = 2 + ashex.endswith('L')
        newnibbles = len(ashex) - njunkchars
        if newnibbles < nibbles:
            # The addition can shrink the hex string (leading zero nibbles
            # vanish); pad back to the intended width.
            ashex = "0x" + "0" * (nibbles - newnibbles) + ashex[2:]
        if int(ashex[2], 16) < 8:
            # "looks positive", so need a byte of sign bits
            ashex = "0xff" + ashex[2:]

    # Drop the "0x" prefix and any trailing "L" marker before unhexlify.
    if ashex.endswith('L'):
        ashex = ashex[2:-1]
    else:
        ashex = ashex[2:]
    assert len(ashex) & 1 == 0, (x, ashex)
    binary = _binascii.unhexlify(ashex)
    # unhexlify yields big-endian bytes; the pickle format is little-endian.
    return binary[::-1]
|
2003-01-27 23:49:52 -04:00
|
|
|
|
|
|
|
def decode_long(data):
|
|
|
|
r"""Decode a long from a two's complement little-endian binary string.
|
2003-01-31 12:43:39 -04:00
|
|
|
|
|
|
|
>>> decode_long('')
|
|
|
|
0L
|
2003-01-27 23:49:52 -04:00
|
|
|
>>> decode_long("\xff\x00")
|
|
|
|
255L
|
|
|
|
>>> decode_long("\xff\x7f")
|
|
|
|
32767L
|
|
|
|
>>> decode_long("\x00\xff")
|
|
|
|
-256L
|
|
|
|
>>> decode_long("\x00\x80")
|
|
|
|
-32768L
|
|
|
|
>>> decode_long("\x80")
|
|
|
|
-128L
|
|
|
|
>>> decode_long("\x7f")
|
|
|
|
127L
|
|
|
|
"""
|
2003-01-30 23:43:58 -04:00
|
|
|
|
2003-01-31 12:43:39 -04:00
|
|
|
nbytes = len(data)
|
|
|
|
if nbytes == 0:
|
|
|
|
return 0L
|
2003-01-30 23:43:58 -04:00
|
|
|
ashex = _binascii.hexlify(data[::-1])
|
2003-02-02 03:51:32 -04:00
|
|
|
n = long(ashex, 16) # quadratic time before Python 2.3; linear now
|
2003-01-30 23:43:58 -04:00
|
|
|
if data[-1] >= '\x80':
|
2003-01-31 12:43:39 -04:00
|
|
|
n -= 1L << (nbytes * 8)
|
2003-01-30 23:43:58 -04:00
|
|
|
return n
|
2003-01-27 23:49:52 -04:00
|
|
|
|
1995-03-14 11:09:05 -04:00
|
|
|
# Shorthands
|
|
|
|
|
2001-10-15 18:29:28 -03:00
|
|
|
try:
|
|
|
|
from cStringIO import StringIO
|
|
|
|
except ImportError:
|
|
|
|
from StringIO import StringIO
|
1996-07-22 19:26:07 -03:00
|
|
|
|
2003-02-03 12:59:48 -04:00
|
|
|
def dump(obj, file, proto=None, bin=None):
    """Pickle obj to the open file object *file*.

    proto and bin are forwarded unchanged to Pickler (protocol selection
    and the deprecated binary flag).
    """
    pickler = Pickler(file, proto, bin)
    pickler.dump(obj)
|
1995-03-14 11:09:05 -04:00
|
|
|
|
2003-02-03 12:59:48 -04:00
|
|
|
def dumps(obj, proto=None, bin=None):
    """Return the pickled representation of obj as a string.

    proto and bin are forwarded unchanged to Pickler.
    """
    buf = StringIO()
    Pickler(buf, proto, bin).dump(obj)
    return buf.getvalue()
|
1995-03-14 11:09:05 -04:00
|
|
|
|
|
|
|
def load(file):
    """Read one pickled object from the open file object and return it."""
    unpickler = Unpickler(file)
    return unpickler.load()
|
1995-03-14 11:09:05 -04:00
|
|
|
|
|
|
|
def loads(str):
    """Reconstruct and return the object pickled in the string *str*."""
    return Unpickler(StringIO(str)).load()
|
2003-01-27 23:49:52 -04:00
|
|
|
|
|
|
|
# Doctest
|
|
|
|
|
|
|
|
def _test():
|
|
|
|
import doctest
|
|
|
|
return doctest.testmod()
|
|
|
|
|
|
|
|
# Running "python pickle.py" exercises the doctests directly.
if __name__ == "__main__":
    _test()
|