Merge.

commit 1b329e791a

.hgtags
@@ -146,6 +146,7 @@ ab2c023a9432f16652e89c404bbc84aa91bf55af v3.4.2
b4cbecbc0781e89a309d03b60a1f75f8499250e6 v3.4.3
04f3f725896c6961212c3a12e8ac25be6958f4fa v3.4.4rc1
737efcadf5a678b184e0fa431aae11276bf06648 v3.4.4
3631bb4a2490292ebf81d3e947ae36da145da564 v3.4.5rc1
5d4b6a57d5fd7564bf73f3db0e46fe5eeb00bcd8 v3.5.0a1
0337bd7ebcb6559d69679bc7025059ad1ce4f432 v3.5.0a2
82656e28b5e5c4ae48d8dd8b5f0d7968908a82b6 v3.5.0a3

@@ -166,7 +166,7 @@ autobuild-dev:
-make suspicious

# for quick rebuilds (HTML only)
autobuild-html:
autobuild-dev-html:
make html SPHINXOPTS='-A daily=1 -A versionswitcher=1'

# for stable releases: only build if not in pre-release stage (alpha, beta)

@@ -177,3 +177,10 @@ autobuild-stable:
exit 1;; \
esac
@make autobuild-dev

autobuild-stable-html:
@case $(DISTVERSION) in *[ab]*) \
echo "Not building; $(DISTVERSION) is not a release version."; \
exit 1;; \
esac
@make autobuild-dev-html

@@ -59,10 +59,10 @@ Module Objects
.. index:: single: __dict__ (module attribute)

Return the dictionary object that implements *module*'s namespace; this object
is the same as the :attr:`__dict__` attribute of the module object. This
is the same as the :attr:`~object.__dict__` attribute of the module object. This
function never fails. It is recommended extensions use other
:c:func:`PyModule_\*` and :c:func:`PyObject_\*` functions rather than directly
manipulate a module's :attr:`__dict__`.
manipulate a module's :attr:`~object.__dict__`.


.. c:function:: PyObject* PyModule_GetNameObject(PyObject *module)

@@ -150,9 +150,8 @@ specific C type of the *self* object.
The :attr:`ml_flags` field is a bitfield which can include the following flags.
The individual flags indicate either a calling convention or a binding
convention. Of the calling convention flags, only :const:`METH_VARARGS` and
:const:`METH_KEYWORDS` can be combined (but note that :const:`METH_KEYWORDS`
alone is equivalent to ``METH_VARARGS | METH_KEYWORDS``). Any of the calling
convention flags can be combined with a binding flag.
:const:`METH_KEYWORDS` can be combined. Any of the calling convention flags
can be combined with a binding flag.


.. data:: METH_VARARGS

@@ -111,10 +111,10 @@ type objects) *must* have the :attr:`ob_size` field.
For statically allocated type objects, the tp_name field should contain a dot.
Everything before the last dot is made accessible as the :attr:`__module__`
attribute, and everything after the last dot is made accessible as the
:attr:`__name__` attribute.
:attr:`~definition.__name__` attribute.

If no dot is present, the entire :c:member:`~PyTypeObject.tp_name` field is made accessible as the
:attr:`__name__` attribute, and the :attr:`__module__` attribute is undefined
:attr:`~definition.__name__` attribute, and the :attr:`__module__` attribute is undefined
(unless explicitly set in the dictionary, as explained above). This means your
type will be impossible to pickle.

@@ -438,7 +438,7 @@ remember the methods for a list, they can do something like this::
>>> L
[1]

With the interpreter, documentation is never far from the student as he's
With the interpreter, documentation is never far from the student as they are
programming.

There are also good IDEs for Python. IDLE is a cross-platform IDE for Python

@@ -374,9 +374,7 @@ module. If you have :mod:`tkinter` available, you may also want to look at
:source:`Tools/demo/redemo.py`, a demonstration program included with the
Python distribution. It allows you to enter REs and strings, and displays
whether the RE matches or fails. :file:`redemo.py` can be quite useful when
trying to debug a complicated RE. Phil Schwartz's `Kodos
<http://kodos.sourceforge.net/>`_ is also an interactive tool for developing and
testing RE patterns.
trying to debug a complicated RE.

This HOWTO uses the standard Python interpreter for its examples. First, run the
Python interpreter, import the :mod:`re` module, and compile a RE::

@@ -37,6 +37,6 @@ that wants to implement an :func:`open` function that wraps the built-in

As an implementation detail, most modules have the name ``__builtins__`` made
available as part of their globals. The value of ``__builtins__`` is normally
either this module or the value of this module's :attr:`__dict__` attribute.
either this module or the value of this module's :attr:`~object.__dict__` attribute.
Since this is an implementation detail, it may not be used by alternate
implementations of Python.

@@ -115,12 +115,12 @@ C library:

.. function:: isblank(c)

Checks for an ASCII whitespace character.
Checks for an ASCII whitespace character; space or horizontal tab.


.. function:: iscntrl(c)

Checks for an ASCII control character (in the range 0x00 to 0x1f).
Checks for an ASCII control character (in the range 0x00 to 0x1f or 0x7f).


.. function:: isdigit(c)

@@ -128,6 +128,9 @@ The individual submodules are described in the following sections.
:platform: Unix
:synopsis: GNU's reinterpretation of dbm.

**Source code:** :source:`Lib/dbm/gnu.py`

--------------

This module is quite similar to the :mod:`dbm` module, but uses the GNU library
``gdbm`` instead to provide some additional functionality. Please note that the

@@ -237,6 +240,9 @@ supported.
:platform: Unix
:synopsis: The standard "database" interface, based on ndbm.

**Source code:** :source:`Lib/dbm/ndbm.py`

--------------

The :mod:`dbm.ndbm` module provides an interface to the Unix "(n)dbm" library.
Dbm objects behave like mappings (dictionaries), except that keys and values are

@@ -299,6 +305,8 @@ to locate the appropriate header file to simplify building this module.
.. module:: dbm.dumb
:synopsis: Portable implementation of the simple DBM interface.

**Source code:** :source:`Lib/dbm/dumb.py`

.. index:: single: databases

.. note::

@@ -308,6 +316,8 @@ to locate the appropriate header file to simplify building this module.
module is not written for speed and is not nearly as heavily used as the other
database modules.

--------------

The :mod:`dbm.dumb` module provides a persistent dictionary-like interface which
is written entirely in Python. Unlike other modules such as :mod:`dbm.gnu` no
external library is required. As with other persistent mappings, the keys and

@@ -431,7 +431,7 @@ The solution is to specify the module name explicitly as follows::
the source, pickling will be disabled.

The new pickle protocol 4 also, in some circumstances, relies on
:attr:`__qualname__` being set to the location where pickle will be able
:attr:`~definition.__qualname__` being set to the location where pickle will be able
to find the class. For example, if the class was made available in class
SomeData in the global scope::

@@ -304,7 +304,7 @@ are always available. They are listed here in alphabetical order.
:func:`dir` reports their attributes.

If the object does not provide :meth:`__dir__`, the function tries its best to
gather information from the object's :attr:`__dict__` attribute, if defined, and
gather information from the object's :attr:`~object.__dict__` attribute, if defined, and
from its type object. The resulting list is not necessarily complete, and may
be inaccurate when the object has a custom :func:`__getattr__`.

@@ -1446,7 +1446,7 @@ are always available. They are listed here in alphabetical order.

With three arguments, return a new type object. This is essentially a
dynamic form of the :keyword:`class` statement. The *name* string is the
class name and becomes the :attr:`~class.__name__` attribute; the *bases*
class name and becomes the :attr:`~definition.__name__` attribute; the *bases*
tuple itemizes the base classes and becomes the :attr:`~class.__bases__`
attribute; and the *dict* dictionary is the namespace containing definitions
for class body and is copied to a standard dictionary to become the

@@ -1464,12 +1464,12 @@ are always available. They are listed here in alphabetical order.
.. function:: vars([object])

Return the :attr:`~object.__dict__` attribute for a module, class, instance,
or any other object with a :attr:`__dict__` attribute.
or any other object with a :attr:`~object.__dict__` attribute.

Objects such as modules and instances have an updateable :attr:`__dict__`
Objects such as modules and instances have an updateable :attr:`~object.__dict__`
attribute; however, other objects may have write restrictions on their
:attr:`__dict__` attributes (for example, classes use a
dictproxy to prevent direct dictionary updates).
:attr:`~object.__dict__` attributes (for example, classes use a
:class:`types.MappingProxyType` to prevent direct dictionary updates).

Without an argument, :func:`vars` acts like :func:`locals`. Note, the
locals dictionary is only useful for reads since updates to the locals

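As an aside to the documentation changes above (not part of the commit itself), a minimal sketch of the documented behaviour of ``vars()``, the three-argument ``type()``, and the read-only class ``__dict__``, assuming a standard CPython 3.x interpreter:

    import types

    class Point:
        pass

    p = Point()
    p.x = 1
    print(vars(p))                # {'x': 1} -- the instance's __dict__

    # Three-argument type(): a dynamic form of the class statement.
    Base = type('Base', (), {'greeting': 'hello'})
    print(Base.__name__)          # 'Base'
    print(Base().greeting)        # 'hello'

    # A class __dict__ is exposed as a read-only mapping proxy.
    print(isinstance(vars(Base), types.MappingProxyType))   # True
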
@@ -474,7 +474,7 @@ have three read-only attributes:

:class:`partial` objects are like :class:`function` objects in that they are
callable, weak referencable, and can have attributes. There are some important
differences. For instance, the :attr:`__name__` and :attr:`__doc__` attributes
differences. For instance, the :attr:`~definition.__name__` and :attr:`__doc__` attributes
are not created automatically. Also, :class:`partial` objects defined in
classes behave like static methods and do not transform into bound methods
during instance attribute look-up.

@@ -211,6 +211,11 @@ Functions
.. module:: importlib.abc
:synopsis: Abstract base classes related to import

**Source code:** :source:`Lib/importlib/abc.py`

--------------


The :mod:`importlib.abc` module contains all of the core abstract base classes
used by :keyword:`import`. Some subclasses of the core abstract base classes
are also provided to help in implementing the core ABCs.

@@ -700,6 +705,10 @@ ABC hierarchy::
.. module:: importlib.machinery
:synopsis: Importers and path hooks

**Source code:** :source:`Lib/importlib/machinery.py`

--------------

This module contains the various objects that help :keyword:`import`
find and load modules.

@@ -1082,6 +1091,11 @@ find and load modules.
.. module:: importlib.util
:synopsis: Utility code for importers


**Source code:** :source:`Lib/importlib/util.py`

--------------

This module contains the various objects that help in the construction of
an :term:`importer`.

@@ -374,8 +374,9 @@ attributes:
are true.

This, for example, is true of ``int.__add__``. An object passing this test
has a :attr:`__get__` attribute but not a :attr:`__set__` attribute, but
beyond that the set of attributes varies. :attr:`__name__` is usually
has a :meth:`~object.__get__` method but not a :meth:`~object.__set__`
method, but beyond that the set of attributes varies. A
:attr:`~definition.__name__` attribute is usually
sensible, and :attr:`__doc__` often is.

Methods implemented via descriptors that also pass one of the other tests

@@ -388,11 +389,11 @@ attributes:

Return true if the object is a data descriptor.

Data descriptors have both a :attr:`__get__` and a :attr:`__set__` attribute.
Data descriptors have both a :attr:`~object.__get__` and a :attr:`~object.__set__` method.
Examples are properties (defined in Python), getsets, and members. The
latter two are defined in C and there are more specific tests available for
those types, which is robust across Python implementations. Typically, data
descriptors will also have :attr:`__name__` and :attr:`__doc__` attributes
descriptors will also have :attr:`~definition.__name__` and :attr:`__doc__` attributes
(properties, getsets, and members have both of these attributes), but this is
not guaranteed.

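An illustrative aside (not part of the commit): the descriptor tests described above, exercised on a property (a data descriptor) and on ``int.__add__`` (a non-data method descriptor); assumes CPython 3.x:

    import inspect

    class C:
        @property
        def x(self):              # property defines __get__ and __set__
            return 42

    print(inspect.isdatadescriptor(C.__dict__['x']))   # True
    print(inspect.ismethoddescriptor(int.__add__))     # True
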
@@ -629,13 +629,19 @@ when serializing instances of "exotic" numerical types such as
:class:`decimal.Decimal`.

.. highlight:: bash
.. module:: json.tool

.. _json-commandline:

Command Line Interface
----------------------

.. module:: json.tool
:synopsis: A command line to validate and pretty-print JSON.

**Source code:** :source:`Lib/json/tool.py`

--------------

The :mod:`json.tool` module provides a simple command line interface to validate
and pretty-print JSON objects.

@@ -1010,7 +1010,7 @@ Connection objects are usually created using :func:`Pipe` -- see also
using :meth:`recv`.

The object must be picklable. Very large pickles (approximately 32 MB+,
though it depends on the OS) may raise a ValueError exception.
though it depends on the OS) may raise a :exc:`ValueError` exception.

.. method:: recv()

@@ -2723,12 +2723,7 @@ start method.

More picklability

Ensure that all arguments to :meth:`Process.__init__` are
picklable. This means, in particular, that bound or unbound
methods cannot be used directly as the ``target`` (unless you use
the *fork* start method) --- just define a function and use that
instead.

Ensure that all arguments to :meth:`Process.__init__` are picklable.
Also, if you subclass :class:`~multiprocessing.Process` then make sure that
instances will be picklable when the :meth:`Process.start
<multiprocessing.Process.start>` method is called.

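An illustrative aside (not part of the commit): the programming guideline above in code form, using a module-level function as the ``target`` so the arguments stay picklable; a sketch assuming the default start method:

    from multiprocessing import Process

    def work(n):                  # module-level functions pickle cleanly
        print(n * n)

    if __name__ == '__main__':
        p = Process(target=work, args=(3,))   # a bound method may not pickle
        p.start()                             # under the 'spawn' start method
        p.join()
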
@@ -1195,7 +1195,11 @@ or `the MSDN <https://msdn.microsoft.com/en-us/library/z0kc8e3z.aspx>`_ on Windo
.. function:: writev(fd, buffers)

Write the contents of *buffers* to file descriptor *fd*. *buffers* must be a
sequence of :term:`bytes-like objects <bytes-like object>`.
sequence of :term:`bytes-like objects <bytes-like object>`. Buffers are
processed in array order. Entire contents of first buffer is written before
proceeding to second, and so on. The operating system may set a limit
(sysconf() value SC_IOV_MAX) on the number of buffers that can be used.

:func:`~os.writev` writes the contents of each object to the file descriptor
and returns the total number of bytes written.

@@ -2049,9 +2053,8 @@ features:

Note that there is a nice correspondence between several attributes
and methods of ``DirEntry`` and of :class:`pathlib.Path`. In
particular, the ``name`` and ``path`` attributes have the same
meaning, as do the ``is_dir()``, ``is_file()``, ``is_symlink()``
and ``stat()`` methods.
particular, the ``name`` attribute has the same meaning, as do the
``is_dir()``, ``is_file()``, ``is_symlink()`` and ``stat()`` methods.

.. versionadded:: 3.5

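An illustrative aside (not part of the commit): a short POSIX-only sketch of the ``os.writev`` behaviour documented above, writing two buffers in order through a pipe:

    import os

    r, w = os.pipe()
    try:
        written = os.writev(w, [b'hello ', b'world'])   # buffers written in order
        print(written)                                  # 11, total bytes written
        print(os.read(r, written))                      # b'hello world'
    finally:
        os.close(r)
        os.close(w)
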
@@ -443,7 +443,7 @@ three digits in length.
The ``'\u'`` and ``'\U'`` escape sequences have been added.

.. deprecated-removed:: 3.5 3.6
Unknown escapes consist of ``'\'`` and ASCII letter now raise a
Unknown escapes consisting of ``'\'`` and ASCII letter now raise a
deprecation warning and will be forbidden in Python 3.6.

@@ -104,7 +104,9 @@ The following functions operate on a history file:

Append the last *nelements* items of history to a file. The default filename is
:file:`~/.history`. The file must already exist. This calls
:c:func:`append_history` in the underlying library.
:c:func:`append_history` in the underlying library. This function
only exists if Python was compiled for a version of the library
that supports it.

.. versionadded:: 3.5

@@ -185,7 +187,8 @@ Startup hooks
be used as the new hook function; if omitted or ``None``, any
function already installed is removed. The hook is called
with no arguments after the first prompt has been printed and just before
readline starts reading input characters.
readline starts reading input characters. This function only exists
if Python was compiled for a version of the library that supports it.


Completion

@@ -309,25 +309,26 @@ Connection Objects
call :meth:`commit`. If you just close your database connection without
calling :meth:`commit` first, your changes will be lost!

.. method:: execute(sql, [parameters])
.. method:: execute(sql[, parameters])

This is a nonstandard shortcut that creates an intermediate cursor object by
calling the cursor method, then calls the cursor's :meth:`execute
<Cursor.execute>` method with the parameters given.
This is a nonstandard shortcut that creates a cursor object by calling
the :meth:`~Connection.cursor` method, calls the cursor's
:meth:`~Cursor.execute` method with the *parameters* given, and returns
the cursor.

.. method:: executemany(sql[, parameters])

.. method:: executemany(sql, [parameters])

This is a nonstandard shortcut that creates an intermediate cursor object by
calling the cursor method, then calls the cursor's :meth:`executemany
<Cursor.executemany>` method with the parameters given.
This is a nonstandard shortcut that creates a cursor object by
calling the :meth:`~Connection.cursor` method, calls the cursor's
:meth:`~Cursor.executemany` method with the *parameters* given, and
returns the cursor.

.. method:: executescript(sql_script)

This is a nonstandard shortcut that creates an intermediate cursor object by
calling the cursor method, then calls the cursor's :meth:`executescript
<Cursor.executescript>` method with the parameters given.

This is a nonstandard shortcut that creates a cursor object by
calling the :meth:`~Connection.cursor` method, calls the cursor's
:meth:`~Cursor.executescript` method with the given *sql_script*, and
returns the cursor.

.. method:: create_function(name, num_params, func)

@@ -488,10 +489,6 @@ Connection Objects
:mod:`sqlite3` module will return Unicode objects for ``TEXT``. If you want to
return bytestrings instead, you can set it to :class:`bytes`.

For efficiency reasons, there's also a way to return :class:`str` objects
only for non-ASCII data, and :class:`bytes` otherwise. To activate it, set
this attribute to :const:`sqlite3.OptimizedUnicode`.

You can also set it to any other callable that accepts a single bytestring
parameter and returns the resulting object.

@@ -533,7 +530,7 @@ Cursor Objects

A :class:`Cursor` instance has the following attributes and methods.

.. method:: execute(sql, [parameters])
.. method:: execute(sql[, parameters])

Executes an SQL statement. The SQL statement may be parameterized (i. e.
placeholders instead of SQL literals). The :mod:`sqlite3` module supports two

@@ -545,7 +542,7 @@ Cursor Objects
.. literalinclude:: ../includes/sqlite3/execute_1.py

:meth:`execute` will only execute a single SQL statement. If you try to execute
more than one statement with it, it will raise a Warning. Use
more than one statement with it, it will raise an ``sqlite3.Warning``. Use
:meth:`executescript` if you want to execute multiple SQL statements with one
call.

@@ -553,8 +550,8 @@ Cursor Objects
.. method:: executemany(sql, seq_of_parameters)

Executes an SQL command against all parameter sequences or mappings found in
the sequence *sql*. The :mod:`sqlite3` module also allows using an
:term:`iterator` yielding parameters instead of a sequence.
the sequence *seq_of_parameters*. The :mod:`sqlite3` module also allows
using an :term:`iterator` yielding parameters instead of a sequence.

.. literalinclude:: ../includes/sqlite3/executemany_1.py

@@ -569,7 +566,7 @@ Cursor Objects
at once. It issues a ``COMMIT`` statement first, then executes the SQL script it
gets as a parameter.

*sql_script* can be an instance of :class:`str` or :class:`bytes`.
*sql_script* can be an instance of :class:`str`.

Example:

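An illustrative aside (not part of the commit): the nonstandard ``Connection`` shortcuts described above create a cursor, call the corresponding ``Cursor`` method, and return the cursor; a minimal sketch:

    import sqlite3

    con = sqlite3.connect(':memory:')
    con.execute("CREATE TABLE t (x INTEGER)")                 # returns a cursor
    con.executemany("INSERT INTO t VALUES (?)", [(1,), (2,)])
    cur = con.execute("SELECT x FROM t WHERE x > ?", (1,))
    print(cur.fetchall())                                     # [(2,)]
    con.close()
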
@@ -4360,9 +4360,10 @@ an (external) *definition* for a module named *foo* somewhere.)
A special attribute of every module is :attr:`~object.__dict__`. This is the
dictionary containing the module's symbol table. Modifying this dictionary will
actually change the module's symbol table, but direct assignment to the
:attr:`__dict__` attribute is not possible (you can write
:attr:`~object.__dict__` attribute is not possible (you can write
``m.__dict__['a'] = 1``, which defines ``m.a`` to be ``1``, but you can't write
``m.__dict__ = {}``). Modifying :attr:`__dict__` directly is not recommended.
``m.__dict__ = {}``). Modifying :attr:`~object.__dict__` directly is
not recommended.

Modules built into the interpreter are written like this: ``<module 'sys'
(built-in)>``. If loaded from a file, they are written as ``<module 'os' from

@@ -4575,14 +4576,16 @@ types, where they are relevant. Some of these are not reported by the
The tuple of base classes of a class object.


.. attribute:: class.__name__
.. attribute:: definition.__name__

The name of the class or type.
The name of the class, function, method, descriptor, or
generator instance.


.. attribute:: class.__qualname__
.. attribute:: definition.__qualname__

The :term:`qualified name` of the class or type.
The :term:`qualified name` of the class, function, method, descriptor,
or generator instance.

.. versionadded:: 3.3

@@ -64,19 +64,19 @@ Some facts and figures:
| ``'x'`` or | Create a tarfile exclusively without |
| ``'x:'`` | compression. |
| | Raise an :exc:`FileExistsError` exception |
| | if it is already exists. |
| | if it already exists. |
+------------------+---------------------------------------------+
| ``'x:gz'`` | Create a tarfile with gzip compression. |
| | Raise an :exc:`FileExistsError` exception |
| | if it is already exists. |
| | if it already exists. |
+------------------+---------------------------------------------+
| ``'x:bz2'`` | Create a tarfile with bzip2 compression. |
| | Raise an :exc:`FileExistsError` exception |
| | if it is already exists. |
| | if it already exists. |
+------------------+---------------------------------------------+
| ``'x:xz'`` | Create a tarfile with lzma compression. |
| | Raise an :exc:`FileExistsError` exception |
| | if it is already exists. |
| | if it already exists. |
+------------------+---------------------------------------------+
| ``'a' or 'a:'`` | Open for appending with no compression. The |
| | file is created if it does not exist. |

@@ -148,8 +148,8 @@ Some facts and figures:

.. class:: TarFile

Class for reading and writing tar archives. Do not use this class directly,
better use :func:`tarfile.open` instead. See :ref:`tarfile-objects`.
Class for reading and writing tar archives. Do not use this class directly:
use :func:`tarfile.open` instead. See :ref:`tarfile-objects`.


.. function:: is_tarfile(name)

@@ -271,7 +271,7 @@ be finalized; only the internally used file object will be closed. See the

*mode* is either ``'r'`` to read from an existing archive, ``'a'`` to append
data to an existing file, ``'w'`` to create a new file overwriting an existing
one or ``'x'`` to create a new file only if it's not exists.
one, or ``'x'`` to create a new file only if it does not already exist.

If *fileobj* is given, it is used for reading or writing data. If it can be
determined, *mode* is overridden by *fileobj*'s mode. *fileobj* will be used

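An illustrative aside (not part of the commit): the exclusive-creation ``'x'`` modes documented above refuse to overwrite an existing archive; a sketch that writes ``archive.tar`` in the current directory (Python 3.5+):

    import tarfile

    with tarfile.open('archive.tar', 'x'):      # create exclusively
        pass

    try:
        tarfile.open('archive.tar', 'x')        # second attempt
    except FileExistsError:
        print('archive.tar already exists')
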
@@ -250,7 +250,7 @@ ZipFile Objects
.. method:: ZipFile.extract(member, path=None, pwd=None)

Extract a member from the archive to the current working directory; *member*
must be its full name or a :class:`ZipInfo` object). Its file information is
must be its full name or a :class:`ZipInfo` object. Its file information is
extracted as accurately as possible. *path* specifies a different directory
to extract to. *member* can be a filename or a :class:`ZipInfo` object.
*pwd* is the password used for encrypted files.

@@ -343,9 +343,9 @@ ZipFile Objects
If ``arcname`` (or ``filename``, if ``arcname`` is not given) contains a null
byte, the name of the file in the archive will be truncated at the null byte.

.. method:: ZipFile.writestr(zinfo_or_arcname, bytes[, compress_type])
.. method:: ZipFile.writestr(zinfo_or_arcname, data[, compress_type])

Write the string *bytes* to the archive; *zinfo_or_arcname* is either the file
Write the string *data* to the archive; *zinfo_or_arcname* is either the file
name it will be given in the archive, or a :class:`ZipInfo` instance. If it's
an instance, at least the filename, date, and time must be given. If it's a
name, the date and time is set to the current date and time.

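An illustrative aside (not part of the commit): ``ZipFile.writestr`` with the renamed *data* argument accepts either a string or bytes; a sketch that writes ``demo.zip`` in the current directory:

    import zipfile

    with zipfile.ZipFile('demo.zip', 'w') as zf:
        zf.writestr('notes/readme.txt', 'hello from writestr')

    with zipfile.ZipFile('demo.zip') as zf:
        print(zf.read('notes/readme.txt'))      # b'hello from writestr'
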
@@ -454,6 +454,19 @@ Callable types

.. tabularcolumns:: |l|L|l|

.. index::
single: __doc__ (function attribute)
single: __name__ (function attribute)
single: __module__ (function attribute)
single: __dict__ (function attribute)
single: __defaults__ (function attribute)
single: __closure__ (function attribute)
single: __code__ (function attribute)
single: __globals__ (function attribute)
single: __annotations__ (function attribute)
single: __kwdefaults__ (function attribute)
pair: global; namespace

+-------------------------+-------------------------------+-----------+
| Attribute | Meaning | |
+=========================+===============================+===========+

@@ -462,10 +475,11 @@ Callable types
| | unavailable; not inherited by | |
| | subclasses | |
+-------------------------+-------------------------------+-----------+
| :attr:`__name__` | The function's name | Writable |
| :attr:`~definition.\ | The function's name | Writable |
| __name__` | | |
+-------------------------+-------------------------------+-----------+
| :attr:`__qualname__` | The function's | Writable |
| | :term:`qualified name` | |
| :attr:`~definition.\ | The function's | Writable |
| __qualname__` | :term:`qualified name` | |
| | | |
| | .. versionadded:: 3.3 | |
+-------------------------+-------------------------------+-----------+

@@ -489,7 +503,7 @@ Callable types
| | module in which the function | |
| | was defined. | |
+-------------------------+-------------------------------+-----------+
| :attr:`__dict__` | The namespace supporting | Writable |
| :attr:`~object.__dict__`| The namespace supporting | Writable |
| | arbitrary function | |
| | attributes. | |
+-------------------------+-------------------------------+-----------+

@@ -519,19 +533,6 @@ Callable types
Additional information about a function's definition can be retrieved from its
code object; see the description of internal types below.

.. index::
single: __doc__ (function attribute)
single: __name__ (function attribute)
single: __module__ (function attribute)
single: __dict__ (function attribute)
single: __defaults__ (function attribute)
single: __closure__ (function attribute)
single: __code__ (function attribute)
single: __globals__ (function attribute)
single: __annotations__ (function attribute)
single: __kwdefaults__ (function attribute)
pair: global; namespace

Instance methods
.. index::
object: method

@@ -550,7 +551,7 @@ Callable types

Special read-only attributes: :attr:`__self__` is the class instance object,
:attr:`__func__` is the function object; :attr:`__doc__` is the method's
documentation (same as ``__func__.__doc__``); :attr:`__name__` is the
documentation (same as ``__func__.__doc__``); :attr:`~definition.__name__` is the
method name (same as ``__func__.__name__``); :attr:`__module__` is the
name of the module the method was defined in, or ``None`` if unavailable.

@@ -637,7 +638,7 @@ Callable types
standard built-in module). The number and type of the arguments are
determined by the C function. Special read-only attributes:
:attr:`__doc__` is the function's documentation string, or ``None`` if
unavailable; :attr:`__name__` is the function's name; :attr:`__self__` is
unavailable; :attr:`~definition.__name__` is the function's name; :attr:`__self__` is
set to ``None`` (but see the next item); :attr:`__module__` is the name of
the module the function was defined in or ``None`` if unavailable.

@@ -687,7 +688,7 @@ Modules

.. index:: single: __dict__ (module attribute)

Special read-only attribute: :attr:`__dict__` is the module's namespace as a
Special read-only attribute: :attr:`~object.__dict__` is the module's namespace as a
dictionary object.

.. impl-detail::

@@ -743,7 +744,7 @@ Custom classes
method object, it is transformed into the object wrapped by the static method
object. See section :ref:`descriptors` for another way in which attributes
retrieved from a class may differ from those actually contained in its
:attr:`__dict__`.
:attr:`~object.__dict__`.

.. index:: triple: class; attribute; assignment

@@ -761,8 +762,8 @@ Custom classes
single: __bases__ (class attribute)
single: __doc__ (class attribute)

Special attributes: :attr:`__name__` is the class name; :attr:`__module__` is
the module name in which the class was defined; :attr:`__dict__` is the
Special attributes: :attr:`~definition.__name__` is the class name; :attr:`__module__` is
the module name in which the class was defined; :attr:`~object.__dict__` is the
dictionary containing the class's namespace; :attr:`~class.__bases__` is a
tuple (possibly empty or a singleton) containing the base classes, in the
order of their occurrence in the base class list; :attr:`__doc__` is the

@@ -785,7 +786,7 @@ Class instances
class method objects are also transformed; see above under "Classes". See
section :ref:`descriptors` for another way in which attributes of a class
retrieved via its instances may differ from the objects actually stored in
the class's :attr:`__dict__`. If no class attribute is found, and the
the class's :attr:`~object.__dict__`. If no class attribute is found, and the
object's class has a :meth:`__getattr__` method, that is called to satisfy
the lookup.

@@ -1466,7 +1467,7 @@ method (a so-called *descriptor* class) appears in an *owner* class (the
descriptor must be in either the owner's class dictionary or in the class
dictionary for one of its parents). In the examples below, "the attribute"
refers to the attribute whose name is the key of the property in the owner
class' :attr:`__dict__`.
class' :attr:`~object.__dict__`.


.. method:: object.__get__(self, instance, owner)

@@ -270,5 +270,5 @@ class SuspiciousVisitor(nodes.GenericNodeVisitor):
# ignore comments -- too much false positives.
# (although doing this could miss some errors;
# there were two sections "commented-out" by mistake
# in the Python docs that would not be catched)
# in the Python docs that would not be caught)
raise nodes.SkipNode

@@ -951,8 +951,8 @@ Examples::
.. rubric:: Footnotes

.. [#] Except for one thing. Module objects have a secret read-only attribute called
:attr:`__dict__` which returns the dictionary used to implement the module's
namespace; the name :attr:`__dict__` is an attribute but not a global name.
:attr:`~object.__dict__` which returns the dictionary used to implement the module's
namespace; the name :attr:`~object.__dict__` is an attribute but not a global name.
Obviously, using this violates the abstraction of namespace implementation, and
should be restricted to things like post-mortem debuggers.

@@ -506,7 +506,7 @@ arguments and/or a dictionary of keyword arguments. In Python 1.5 and earlier,
you'd use the :func:`apply` built-in function: ``apply(f, args, kw)`` calls the
function :func:`f` with the argument tuple *args* and the keyword arguments in
the dictionary *kw*. :func:`apply` is the same in 2.0, but thanks to a patch
from Greg Ewing, ``f(*args, **kw)`` as a shorter and clearer way to achieve the
from Greg Ewing, ``f(*args, **kw)`` is a shorter and clearer way to achieve the
same effect. This syntax is symmetrical with the syntax for defining
functions::

@@ -442,8 +442,8 @@ Python syntax::
f.grammar = "A ::= B (C D)*"

The dictionary containing attributes can be accessed as the function's
:attr:`__dict__`. Unlike the :attr:`__dict__` attribute of class instances, in
functions you can actually assign a new dictionary to :attr:`__dict__`, though
:attr:`~object.__dict__`. Unlike the :attr:`~object.__dict__` attribute of class instances, in
functions you can actually assign a new dictionary to :attr:`~object.__dict__`, though
the new value is restricted to a regular Python dictionary; you *can't* be
tricky and set it to a :class:`UserDict` instance, or any other random object
that behaves like a mapping.

@@ -157,7 +157,7 @@ attributes and methods were supported by an object. There were some informal
conventions, such as defining :attr:`__members__` and :attr:`__methods__`
attributes that were lists of names, but often the author of an extension type
or a class wouldn't bother to define them. You could fall back on inspecting
the :attr:`__dict__` of an object, but when class inheritance or an arbitrary
the :attr:`~object.__dict__` of an object, but when class inheritance or an arbitrary
:meth:`__getattr__` hook were in use this could still be inaccurate.

The one big idea underlying the new class model is that an API for describing

@@ -169,7 +169,7 @@ possible, as well as more exotic constructs.
Attribute descriptors are objects that live inside class objects, and have a few
attributes of their own:

* :attr:`__name__` is the attribute's name.
* :attr:`~definition.__name__` is the attribute's name.

* :attr:`__doc__` is the attribute's docstring.

@@ -329,7 +329,7 @@ However, Python 2.2's support for :dfn:`properties` will often be a simpler way
to trap attribute references. Writing a :meth:`__getattr__` method is
complicated because to avoid recursion you can't use regular attribute accesses
inside them, and instead have to mess around with the contents of
:attr:`__dict__`. :meth:`__getattr__` methods also end up being called by Python
:attr:`~object.__dict__`. :meth:`__getattr__` methods also end up being called by Python
when it checks for other methods such as :meth:`__repr__` or :meth:`__coerce__`,
and so have to be written with this in mind. Finally, calling a function on
every attribute access results in a sizable performance loss.

@@ -357,15 +357,15 @@ write::
That is certainly clearer and easier to write than a pair of
:meth:`__getattr__`/:meth:`__setattr__` methods that check for the :attr:`size`
attribute and handle it specially while retrieving all other attributes from the
instance's :attr:`__dict__`. Accesses to :attr:`size` are also the only ones
instance's :attr:`~object.__dict__`. Accesses to :attr:`size` are also the only ones
which have to perform the work of calling a function, so references to other
attributes run at their usual speed.

Finally, it's possible to constrain the list of attributes that can be
referenced on an object using the new :attr:`__slots__` class attribute. Python
referenced on an object using the new :attr:`~object.__slots__` class attribute. Python
objects are usually very dynamic; at any time it's possible to define a new
attribute on an instance by just doing ``obj.new_attr=1``. A new-style class
can define a class attribute named :attr:`__slots__` to limit the legal
can define a class attribute named :attr:`~object.__slots__` to limit the legal
attributes to a particular set of names. An example will make this clear::

>>> class C(object):

@@ -383,7 +383,7 @@ attributes to a particular set of names. An example will make this clear::
AttributeError: 'C' object has no attribute 'newattr'

Note how you get an :exc:`AttributeError` on the attempt to assign to an
attribute not listed in :attr:`__slots__`.
attribute not listed in :attr:`~object.__slots__`.


.. _sect-rellinks:

@@ -1111,10 +1111,10 @@ Here are all of the changes that Python 2.3 makes to the core Python language.
<type '_socket.socket'>

* One of the noted incompatibilities between old- and new-style classes has been
removed: you can now assign to the :attr:`__name__` and :attr:`__bases__`
removed: you can now assign to the :attr:`~definition.__name__` and :attr:`~class.__bases__`
attributes of new-style classes. There are some restrictions on what can be
assigned to :attr:`__bases__` along the lines of those relating to assigning to
an instance's :attr:`__class__` attribute.
assigned to :attr:`~class.__bases__` along the lines of those relating to assigning to
an instance's :attr:`~instance.__class__` attribute.

.. ======================================================================

@@ -1920,7 +1920,7 @@ Changes to Python's build process and to the C API include:

* If you dynamically allocate type objects in your extension, you should be
aware of a change in the rules relating to the :attr:`__module__` and
:attr:`__name__` attributes. In summary, you will want to ensure the type's
:attr:`~definition.__name__` attributes. In summary, you will want to ensure the type's
dictionary contains a ``'__module__'`` key; making the module name the part of
the type name leading up to the final period will no longer have the desired
effect. For more detail, read the API reference documentation or the source.

@@ -783,8 +783,8 @@ Operators And Special Methods
:attr:`func_closure`, :attr:`func_code`, :attr:`func_defaults`,
:attr:`func_dict`, :attr:`func_doc`, :attr:`func_globals`,
:attr:`func_name` were renamed to :attr:`__closure__`,
:attr:`__code__`, :attr:`__defaults__`, :attr:`__dict__`,
:attr:`__doc__`, :attr:`__globals__`, :attr:`__name__`,
:attr:`__code__`, :attr:`__defaults__`, :attr:`~object.__dict__`,
:attr:`__doc__`, :attr:`__globals__`, :attr:`~definition.__name__`,
respectively.

* :meth:`__nonzero__` is now :meth:`__bool__`.

@@ -82,6 +82,9 @@ class _GeneratorContextManager(ContextDecorator):
# raised inside the "with" statement from being suppressed.
return exc is not value
except RuntimeError as exc:
# Don't re-raise the passed in exception. (issue27112)
if exc is value:
return False
# Likewise, avoid suppressing if a StopIteration exception
# was passed to throw() and later wrapped into a RuntimeError
# (see PEP 479).

@@ -1,5 +1,5 @@
import unittest
import os
import os, os.path
import sys
import test.support
from ctypes import *

@@ -64,6 +64,11 @@ class Test_OpenGL_libs(unittest.TestCase):
self.skipTest('lib_gle not available')
self.gle.gleGetJoinStyle

def test_shell_injection(self):
result = find_library('; echo Hello shell > ' + test.support.TESTFN)
self.assertFalse(os.path.lexists(test.support.TESTFN))
self.assertIsNone(result)

# On platforms where the default shared library suffix is '.so',
# at least some libraries can be loaded as attributes of the cdll
# object, since ctypes now tries loading the lib again

@@ -227,10 +227,10 @@ class StructureTestCase(unittest.TestCase):

def test_conflicting_initializers(self):
class POINT(Structure):
_fields_ = [("x", c_int), ("y", c_int)]
_fields_ = [("phi", c_float), ("rho", c_float)]
# conflicting positional and keyword args
self.assertRaises(TypeError, POINT, 2, 3, x=4)
self.assertRaises(TypeError, POINT, 2, 3, y=4)
self.assertRaisesRegex(TypeError, "phi", POINT, 2, 3, phi=4)
self.assertRaisesRegex(TypeError, "rho", POINT, 2, 3, rho=4)

# too many initializers
self.assertRaises(TypeError, POINT, 2, 3, 4)

@@ -1,6 +1,7 @@
import sys, os
import contextlib
import os
import shutil
import subprocess
import sys

# find_library(name) returns the pathname of a library, or None.
if os.name == "nt":

@@ -94,28 +95,46 @@ elif os.name == "posix":
import re, tempfile

def _findLib_gcc(name):
expr = r'[^\(\)\s]*lib%s\.[^\(\)\s]*' % re.escape(name)
fdout, ccout = tempfile.mkstemp()
os.close(fdout)
cmd = 'if type gcc >/dev/null 2>&1; then CC=gcc; elif type cc >/dev/null 2>&1; then CC=cc;else exit 10; fi;' \
'LANG=C LC_ALL=C $CC -Wl,-t -o ' + ccout + ' 2>&1 -l' + name
# Run GCC's linker with the -t (aka --trace) option and examine the
# library name it prints out. The GCC command will fail because we
# haven't supplied a proper program with main(), but that does not
# matter.
expr = os.fsencode(r'[^\(\)\s]*lib%s\.[^\(\)\s]*' % re.escape(name))

c_compiler = shutil.which('gcc')
if not c_compiler:
c_compiler = shutil.which('cc')
if not c_compiler:
# No C compiler available, give up
return None

temp = tempfile.NamedTemporaryFile()
try:
f = os.popen(cmd)
args = [c_compiler, '-Wl,-t', '-o', temp.name, '-l' + name]

env = dict(os.environ)
env['LC_ALL'] = 'C'
env['LANG'] = 'C'
try:
trace = f.read()
finally:
rv = f.close()
proc = subprocess.Popen(args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
env=env)
except OSError: # E.g. bad executable
return None
with proc:
trace = proc.stdout.read()
finally:
try:
os.unlink(ccout)
temp.close()
except FileNotFoundError:
# Raised if the file was already removed, which is the normal
# behaviour of GCC if linking fails
pass
if rv == 10:
raise OSError('gcc or cc command not found')
res = re.search(expr, trace)
if not res:
return None
return res.group(0)
return os.fsdecode(res.group(0))


if sys.platform == "sunos5":

@@ -123,55 +142,75 @@ elif os.name == "posix":
def _get_soname(f):
if not f:
return None
cmd = "/usr/ccs/bin/dump -Lpv 2>/dev/null " + f
with contextlib.closing(os.popen(cmd)) as f:
data = f.read()
res = re.search(r'\[.*\]\sSONAME\s+([^\s]+)', data)

try:
proc = subprocess.Popen(("/usr/ccs/bin/dump", "-Lpv", f),
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL)
except OSError: # E.g. command not found
return None
with proc:
data = proc.stdout.read()
res = re.search(br'\[.*\]\sSONAME\s+([^\s]+)', data)
if not res:
return None
return res.group(1)
return os.fsdecode(res.group(1))
else:
def _get_soname(f):
# assuming GNU binutils / ELF
if not f:
return None
cmd = 'if ! type objdump >/dev/null 2>&1; then exit 10; fi;' \
"objdump -p -j .dynamic 2>/dev/null " + f
f = os.popen(cmd)
objdump = shutil.which('objdump')
if not objdump:
# objdump is not available, give up
return None

try:
dump = f.read()
finally:
rv = f.close()
if rv == 10:
raise OSError('objdump command not found')
res = re.search(r'\sSONAME\s+([^\s]+)', dump)
proc = subprocess.Popen((objdump, '-p', '-j', '.dynamic', f),
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL)
except OSError: # E.g. bad executable
return None
with proc:
dump = proc.stdout.read()
res = re.search(br'\sSONAME\s+([^\s]+)', dump)
if not res:
return None
return res.group(1)
return os.fsdecode(res.group(1))

if sys.platform.startswith(("freebsd", "openbsd", "dragonfly")):

def _num_version(libname):
# "libxyz.so.MAJOR.MINOR" => [ MAJOR, MINOR ]
parts = libname.split(".")
parts = libname.split(b".")
nums = []
try:
while parts:
nums.insert(0, int(parts.pop()))
except ValueError:
pass
return nums or [ sys.maxsize ]
return nums or [sys.maxsize]

def find_library(name):
ename = re.escape(name)
expr = r':-l%s\.\S+ => \S*/(lib%s\.\S+)' % (ename, ename)
with contextlib.closing(os.popen('/sbin/ldconfig -r 2>/dev/null')) as f:
data = f.read()
expr = os.fsencode(expr)

try:
proc = subprocess.Popen(('/sbin/ldconfig', '-r'),
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL)
except OSError: # E.g. command not found
data = b''
else:
with proc:
data = proc.stdout.read()

res = re.findall(expr, data)
if not res:
return _get_soname(_findLib_gcc(name))
res.sort(key=_num_version)
return res[-1]
return os.fsdecode(res[-1])

elif sys.platform == "sunos5":

@@ -179,17 +218,27 @@ elif os.name == "posix":
if not os.path.exists('/usr/bin/crle'):
return None

env = dict(os.environ)
env['LC_ALL'] = 'C'

if is64:
cmd = 'env LC_ALL=C /usr/bin/crle -64 2>/dev/null'
args = ('/usr/bin/crle', '-64')
else:
cmd = 'env LC_ALL=C /usr/bin/crle 2>/dev/null'
args = ('/usr/bin/crle',)

paths = None
with contextlib.closing(os.popen(cmd)) as f:
for line in f.readlines():
try:
proc = subprocess.Popen(args,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
env=env)
except OSError: # E.g. bad executable
return None
with proc:
for line in proc.stdout:
line = line.strip()
if line.startswith('Default Library Path (ELF):'):
paths = line.split()[4]
if line.startswith(b'Default Library Path (ELF):'):
paths = os.fsdecode(line).split()[4]

if not paths:
return None

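An illustrative aside (not part of the commit): the public entry point touched by this rewrite is ``ctypes.util.find_library``, which now gathers linker output via ``subprocess`` instead of ``os.popen`` but still returns a library name (or ``None``) usable with ``CDLL``; a sketch for a POSIX system:

    from ctypes import CDLL
    from ctypes.util import find_library

    name = find_library('m')      # e.g. 'libm.so.6' on glibc-based Linux
    print(name)
    if name:
        libm = CDLL(name)         # load the C math library
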
@@ -54,13 +54,13 @@ def _ctoi(c):
def isalnum(c): return isalpha(c) or isdigit(c)
def isalpha(c): return isupper(c) or islower(c)
def isascii(c): return _ctoi(c) <= 127 # ?
def isblank(c): return _ctoi(c) in (8,32)
def iscntrl(c): return _ctoi(c) <= 31
def isblank(c): return _ctoi(c) in (9, 32)
def iscntrl(c): return _ctoi(c) <= 31 or _ctoi(c) == 127
def isdigit(c): return _ctoi(c) >= 48 and _ctoi(c) <= 57
def isgraph(c): return _ctoi(c) >= 33 and _ctoi(c) <= 126
def islower(c): return _ctoi(c) >= 97 and _ctoi(c) <= 122
def isprint(c): return _ctoi(c) >= 32 and _ctoi(c) <= 126
def ispunct(c): return _ctoi(c) != 32 and not isalnum(c)
def ispunct(c): return isgraph(c) and not isalnum(c)
def isspace(c): return _ctoi(c) in (9, 10, 11, 12, 13, 32)
def isupper(c): return _ctoi(c) >= 65 and _ctoi(c) <= 90
def isxdigit(c): return isdigit(c) or \

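An illustrative aside (not part of the commit): with the corrected helpers above, ``curses.ascii`` classifies tab as blank, DEL as a control character, and space as non-punctuation; a sketch assuming the patched module:

    import curses.ascii as ca

    print(ca.isblank(' '), ca.isblank('\t'))   # True True
    print(ca.iscntrl('\x7f'))                  # True
    print(ca.ispunct('!'), ca.ispunct(' '))    # True False
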
@@ -86,11 +86,9 @@ def _get_vc_env(plat_spec):

try:
out = subprocess.check_output(
'"{}" {} && set'.format(vcvarsall, plat_spec),
shell=True,
'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec),
stderr=subprocess.STDOUT,
universal_newlines=True,
)
).decode('utf-16le', errors='replace')
except subprocess.CalledProcessError as exc:
log.error(exc.output)
raise DistutilsPlatformError("Error executing {}"

@@ -91,7 +91,7 @@ class upload(PyPIRCCommand):
data = {
# action
':action': 'file_upload',
'protcol_version': '1',
'protocol_version': '1',

# identify release
'name': meta.get_name(),

@@ -18,6 +18,7 @@ PYPIRC = """\
index-servers =
server1
server2
server3

[server1]
username:me

@@ -28,6 +29,10 @@ username:meagain
password: secret
realm:acme
repository:http://another.pypi/

[server3]
username:cbiggles
password:yh^%#rest-of-my-password
"""

PYPIRC_OLD = """\

@@ -47,14 +52,14 @@ password:xxx
"""


class PyPIRCCommandTestCase(support.TempdirManager,
class BasePyPIRCCommandTestCase(support.TempdirManager,
support.LoggingSilencer,
support.EnvironGuard,
unittest.TestCase):

def setUp(self):
"""Patches the environment."""
super(PyPIRCCommandTestCase, self).setUp()
super(BasePyPIRCCommandTestCase, self).setUp()
self.tmp_dir = self.mkdtemp()
os.environ['HOME'] = self.tmp_dir
self.rc = os.path.join(self.tmp_dir, '.pypirc')

@@ -73,7 +78,10 @@ class PyPIRCCommandTestCase(support.TempdirManager,
def tearDown(self):
"""Removes the patch."""
set_threshold(self.old_threshold)
super(PyPIRCCommandTestCase, self).tearDown()
super(BasePyPIRCCommandTestCase, self).tearDown()


class PyPIRCCommandTestCase(BasePyPIRCCommandTestCase):

def test_server_registration(self):
# This test makes sure PyPIRCCommand knows how to:

@@ -113,6 +121,20 @@ class PyPIRCCommandTestCase(support.TempdirManager,
finally:
f.close()

def test_config_interpolation(self):
# using the % character in .pypirc should not raise an error (#20120)
self.write_file(self.rc, PYPIRC)
cmd = self._cmd(self.dist)
cmd.repository = 'server3'
config = cmd._read_pypirc()

config = list(sorted(config.items()))
waited = [('password', 'yh^%#rest-of-my-password'), ('realm', 'pypi'),
('repository', 'https://pypi.python.org/pypi'),
('server', 'server3'), ('username', 'cbiggles')]
self.assertEqual(config, waited)


def test_suite():
return unittest.makeSuite(PyPIRCCommandTestCase)

@@ -83,6 +83,24 @@ class msvccompilerTestCase(support.TempdirManager,
self.assertFalse(os.path.isfile(os.path.join(
tempdir, os.path.basename(dll))))

def test_get_vc_env_unicode(self):
import distutils._msvccompiler as _msvccompiler

test_var = 'ṰḖṤṪ┅ṼẨṜ'
test_value = '₃⁴₅'

# Ensure we don't early exit from _get_vc_env
old_distutils_use_sdk = os.environ.pop('DISTUTILS_USE_SDK', None)
os.environ[test_var] = test_value
try:
env = _msvccompiler._get_vc_env('x86')
self.assertIn(test_var.lower(), env)
self.assertEqual(test_value, env[test_var.lower()])
finally:
os.environ.pop(test_var)
if old_distutils_use_sdk:
os.environ['DISTUTILS_USE_SDK'] = old_distutils_use_sdk

def test_suite():
return unittest.makeSuite(msvccompilerTestCase)

@@ -12,7 +12,7 @@ from distutils.command.register import register
from distutils.errors import DistutilsSetupError
from distutils.log import INFO

from distutils.tests.test_config import PyPIRCCommandTestCase
from distutils.tests.test_config import BasePyPIRCCommandTestCase

try:
import docutils

@@ -72,7 +72,7 @@ class FakeOpener(object):
}.get(name.lower(), default)


class RegisterTestCase(PyPIRCCommandTestCase):
class RegisterTestCase(BasePyPIRCCommandTestCase):

def setUp(self):
super(RegisterTestCase, self).setUp()

@@ -23,7 +23,7 @@ except ImportError:

from distutils.command.sdist import sdist, show_formats
from distutils.core import Distribution
from distutils.tests.test_config import PyPIRCCommandTestCase
from distutils.tests.test_config import BasePyPIRCCommandTestCase
from distutils.errors import DistutilsOptionError
from distutils.spawn import find_executable
from distutils.log import WARN

@@ -52,7 +52,7 @@ somecode%(sep)sdoc.dat
somecode%(sep)sdoc.txt
"""

class SDistTestCase(PyPIRCCommandTestCase):
class SDistTestCase(BasePyPIRCCommandTestCase):

def setUp(self):
# PyPIRCCommandTestCase creates a temp dir already

@@ -12,7 +12,7 @@ from distutils.core import Distribution
from distutils.errors import DistutilsError
from distutils.log import ERROR, INFO

from distutils.tests.test_config import PYPIRC, PyPIRCCommandTestCase
from distutils.tests.test_config import PYPIRC, BasePyPIRCCommandTestCase

PYPIRC_LONG_PASSWORD = """\
[distutils]

@@ -66,7 +66,7 @@ class FakeOpen(object):
return self.code


class uploadTestCase(PyPIRCCommandTestCase):
class uploadTestCase(BasePyPIRCCommandTestCase):

def setUp(self):
super(uploadTestCase, self).setUp()

@@ -130,13 +130,14 @@ class uploadTestCase(PyPIRCCommandTestCase):

# what did we send ?
headers = dict(self.last_open.req.headers)
self.assertEqual(headers['Content-length'], '2161')
self.assertEqual(headers['Content-length'], '2162')
content_type = headers['Content-type']
self.assertTrue(content_type.startswith('multipart/form-data'))
self.assertEqual(self.last_open.req.get_method(), 'POST')
expected_url = 'https://pypi.python.org/pypi'
self.assertEqual(self.last_open.req.get_full_url(), expected_url)
self.assertTrue(b'xxx' in self.last_open.req.data)
self.assertIn(b'protocol_version', self.last_open.req.data)

# The PyPI response body was echoed
results = self.get_logs(INFO)

@ -1,6 +1,17 @@
What's New in IDLE 3.5.3?
=========================
*Release date: 2017-01-01?*

- Issue #27365: Allow non-ascii chars in IDLE NEWS.txt, for contributor names.

- Issue #27245: IDLE: Cleanly delete custom themes and key bindings.
  Previously, when IDLE was started from a console or by import, a cascade
  of warnings was emitted. Patch by Serhiy Storchaka.


What's New in IDLE 3.5.2?
=========================
*Release date: 2016-06-30?*
*Release date: 2016-06-26*

- Issue #5124: Paste with text selected now replaces the selection on X11.
  This matches how paste works on Windows, Mac, most modern Linux apps,
@ -145,5 +145,7 @@ class AboutDialog(Toplevel):
        self.destroy()

if __name__ == '__main__':
    import unittest
    unittest.main('idlelib.idle_test.test_helpabout', verbosity=2, exit=False)
    from idlelib.idle_test.htest import run
    run(AboutDialog)
@ -751,6 +751,7 @@ class ConfigDialog(Toplevel):
        if not tkMessageBox.askyesno(
                'Delete Key Set', delmsg % keySetName, parent=self):
            return
        self.DeactivateCurrentConfig()
        #remove key set from config
        idleConf.userCfg['keys'].remove_section(keySetName)
        if keySetName in self.changedItems['keys']:
@ -769,7 +770,8 @@ class ConfigDialog(Toplevel):
        self.keysAreBuiltin.set(idleConf.defaultCfg['main'].Get('Keys', 'default'))
        self.builtinKeys.set(idleConf.defaultCfg['main'].Get('Keys', 'name'))
        #user can't back out of these changes, they must be applied now
        self.Apply()
        self.SaveAllChangedConfigs()
        self.ActivateConfigChanges()
        self.SetKeysType()

    def DeleteCustomTheme(self):
@ -778,6 +780,7 @@ class ConfigDialog(Toplevel):
        if not tkMessageBox.askyesno(
                'Delete Theme', delmsg % themeName, parent=self):
            return
        self.DeactivateCurrentConfig()
        #remove theme from config
        idleConf.userCfg['highlight'].remove_section(themeName)
        if themeName in self.changedItems['highlight']:
@ -796,7 +799,8 @@ class ConfigDialog(Toplevel):
        self.themeIsBuiltin.set(idleConf.defaultCfg['main'].Get('Theme', 'default'))
        self.builtinTheme.set(idleConf.defaultCfg['main'].Get('Theme', 'name'))
        #user can't back out of these changes, they must be applied now
        self.Apply()
        self.SaveAllChangedConfigs()
        self.ActivateConfigChanges()
        self.SetThemeType()

    def GetColour(self):
@ -0,0 +1,52 @@
'''Test idlelib.help_about.

Coverage:
'''
from idlelib import aboutDialog as help_about
from idlelib import textView as textview
from idlelib.idle_test.mock_idle import Func
from idlelib.idle_test.mock_tk import Mbox
import unittest

About = help_about.AboutDialog
class Dummy_about_dialog():
    # Dummy class for testing file display functions.
    idle_credits = About.ShowIDLECredits
    idle_readme = About.ShowIDLEAbout
    idle_news = About.ShowIDLENEWS
    # Called by the above
    display_file_text = About.display_file_text


class DisplayFileTest(unittest.TestCase):
    "Test that .txt files are found and properly decoded."
    dialog = Dummy_about_dialog()

    @classmethod
    def setUpClass(cls):
        cls.orig_mbox = textview.tkMessageBox
        cls.orig_view = textview.view_text
        cls.mbox = Mbox()
        cls.view = Func()
        textview.tkMessageBox = cls.mbox
        textview.view_text = cls.view
        cls.About = Dummy_about_dialog()

    @classmethod
    def tearDownClass(cls):
        textview.tkMessageBox = cls.orig_mbox
        textview.view_text = cls.orig_view

    def test_file_isplay(self):
        for handler in (self.dialog.idle_credits,
                        self.dialog.idle_readme,
                        self.dialog.idle_news):
            self.mbox.showerror.message = ''
            self.view.called = False
            handler()
            self.assertEqual(self.mbox.showerror.message, '')
            self.assertEqual(self.view.called, True)


if __name__ == '__main__':
    unittest.main(verbosity=2)
@ -76,6 +76,10 @@ def view_file(parent, title, filename, encoding=None, modal=True):
        tkMessageBox.showerror(title='File Load Error',
                               message='Unable to load file %r .' % filename,
                               parent=parent)
    except UnicodeDecodeError as err:
        tkMessageBox.showerror(title='Unicode Decode Error',
                               message=str(err),
                               parent=parent)
    else:
        return view_text(parent, title, contents, modal)
@ -223,6 +223,7 @@ _code_type = type(_write_atomic.__code__)
#     Python 3.5b1  3330 (PEP 448: Additional Unpacking Generalizations)
#     Python 3.5b2  3340 (fix dictionary display evaluation order #11205)
#     Python 3.5b2  3350 (add GET_YIELD_FROM_ITER opcode #24400)
#     Python 3.5.2  3351 (fix BUILD_MAP_UNPACK_WITH_CALL opcode #27286)
#
# MAGIC must change whenever the bytecode emitted by the compiler may no
# longer be understood by older implementations of the eval loop (usually
@ -231,7 +232,7 @@ _code_type = type(_write_atomic.__code__)
# Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array
# in PC/launcher.c must also be updated.

MAGIC_NUMBER = (3350).to_bytes(2, 'little') + b'\r\n'
MAGIC_NUMBER = (3351).to_bytes(2, 'little') + b'\r\n'
_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little')  # For import.c

_PYCACHE = '__pycache__'
@ -241,7 +241,7 @@ class _LazyModule(types.ModuleType):
            if id(self) != id(sys.modules[original_name]):
                msg = ('module object for {!r} substituted in sys.modules '
                       'during a lazy load')
            raise ValueError(msg.format(original_name))
                raise ValueError(msg.format(original_name))
            # Update after loading since that's what would happen in an eager
            # loading situation.
            self.__dict__.update(attrs_updated)
@ -9,6 +9,7 @@ import os
import codecs
import operator
import io
import re
import tempfile
import shutil
import unittest
@ -226,8 +227,8 @@ from __future__ import print_function"""
                                            actually_write=False)
        # Testing that it logged this message when write=False was passed is
        # sufficient to see that it did not bail early after "No changes".
        message_regex = r"Not writing changes to .*%s%s" % (
            os.sep, os.path.basename(test_file))
        message_regex = r"Not writing changes to .*%s" % \
            re.escape(os.sep + os.path.basename(test_file))
        for message in debug_messages:
            if "Not writing changes" in message:
                self.assertRegex(message, message_regex)
@ -28,7 +28,7 @@ to a file named "<name>.html".

Module docs for core modules are assumed to be in

    http://docs.python.org/X.Y/library/
    https://docs.python.org/X.Y/library/

This can be overridden by setting the PYTHONDOCS environment variable
to a different URL or to a local directory containing the Library
@ -395,6 +395,7 @@ class Doc:

        docloc = os.environ.get("PYTHONDOCS", self.PYTHONDOCS)

        basedir = os.path.normcase(basedir)
        if (isinstance(object, type(os)) and
            (object.__name__ in ('errno', 'exceptions', 'gc', 'imp',
                                 'marshal', 'posix', 'signal', 'sys',
@ -402,7 +403,7 @@ class Doc:
             (file.startswith(basedir) and
              not file.startswith(os.path.join(basedir, 'site-packages')))) and
            object.__name__ not in ('xml.etree', 'test.pydoc_mod')):
            if docloc.startswith("http://"):
            if docloc.startswith(("http://", "https://")):
                docloc = "%s/%s" % (docloc.rstrip("/"), object.__name__.lower())
            else:
                docloc = os.path.join(docloc, object.__name__.lower() + ".html")
@ -122,11 +122,8 @@ class ConnectionTests(unittest.TestCase):

    def CheckFailedOpen(self):
        YOU_CANNOT_OPEN_THIS = "/foo/bar/bla/23534/mydb.db"
        try:
        with self.assertRaises(sqlite.OperationalError):
            con = sqlite.connect(YOU_CANNOT_OPEN_THIS)
        except sqlite.OperationalError:
            return
        self.fail("should have raised an OperationalError")

    def CheckClose(self):
        self.cx.close()
@ -180,6 +177,12 @@ class ConnectionTests(unittest.TestCase):
        with self.assertRaises(sqlite.OperationalError):
            cx.execute('insert into test(id) values(1)')

    @unittest.skipIf(sqlite.sqlite_version_info >= (3, 3, 1),
                     'needs sqlite versions older than 3.3.1')
    def CheckSameThreadErrorOnOldVersion(self):
        with self.assertRaises(sqlite.NotSupportedError) as cm:
            sqlite.connect(':memory:', check_same_thread=False)
        self.assertEqual(str(cm.exception), 'shared connections not available')

class CursorTests(unittest.TestCase):
    def setUp(self):
@ -196,22 +199,12 @@ class CursorTests(unittest.TestCase):
|
|||
self.cu.execute("delete from test")
|
||||
|
||||
def CheckExecuteIllegalSql(self):
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError):
|
||||
self.cu.execute("select asdf")
|
||||
self.fail("should have raised an OperationalError")
|
||||
except sqlite.OperationalError:
|
||||
return
|
||||
except:
|
||||
self.fail("raised wrong exception")
|
||||
|
||||
def CheckExecuteTooMuchSql(self):
|
||||
try:
|
||||
with self.assertRaises(sqlite.Warning):
|
||||
self.cu.execute("select 5+4; select 4+5")
|
||||
self.fail("should have raised a Warning")
|
||||
except sqlite.Warning:
|
||||
return
|
||||
except:
|
||||
self.fail("raised wrong exception")
|
||||
|
||||
def CheckExecuteTooMuchSql2(self):
|
||||
self.cu.execute("select 5+4; -- foo bar")
|
||||
|
@ -226,13 +219,8 @@ class CursorTests(unittest.TestCase):
|
|||
""")
|
||||
|
||||
def CheckExecuteWrongSqlArg(self):
|
||||
try:
|
||||
with self.assertRaises(ValueError):
|
||||
self.cu.execute(42)
|
||||
self.fail("should have raised a ValueError")
|
||||
except ValueError:
|
||||
return
|
||||
except:
|
||||
self.fail("raised wrong exception.")
|
||||
|
||||
def CheckExecuteArgInt(self):
|
||||
self.cu.execute("insert into test(id) values (?)", (42,))
|
||||
|
@ -250,29 +238,25 @@ class CursorTests(unittest.TestCase):
|
|||
row = self.cu.fetchone()
|
||||
self.assertEqual(row[0], "Hu\x00go")
|
||||
|
||||
def CheckExecuteNonIterable(self):
|
||||
with self.assertRaises(ValueError) as cm:
|
||||
self.cu.execute("insert into test(id) values (?)", 42)
|
||||
self.assertEqual(str(cm.exception), 'parameters are of unsupported type')
|
||||
|
||||
def CheckExecuteWrongNoOfArgs1(self):
|
||||
# too many parameters
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
self.cu.execute("insert into test(id) values (?)", (17, "Egon"))
|
||||
self.fail("should have raised ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
|
||||
def CheckExecuteWrongNoOfArgs2(self):
|
||||
# too little parameters
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
self.cu.execute("insert into test(id) values (?)")
|
||||
self.fail("should have raised ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
|
||||
def CheckExecuteWrongNoOfArgs3(self):
|
||||
# no parameters, parameters are needed
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
self.cu.execute("insert into test(id) values (?)")
|
||||
self.fail("should have raised ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
|
||||
def CheckExecuteParamList(self):
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
|
@ -311,27 +295,18 @@ class CursorTests(unittest.TestCase):
|
|||
|
||||
def CheckExecuteDictMappingTooLittleArgs(self):
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
self.cu.execute("select name from test where name=:name and id=:id", {"name": "foo"})
|
||||
self.fail("should have raised ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
|
||||
def CheckExecuteDictMappingNoArgs(self):
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
self.cu.execute("select name from test where name=:name")
|
||||
self.fail("should have raised ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
|
||||
def CheckExecuteDictMappingUnnamed(self):
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
self.cu.execute("select name from test where name=?", {"name": "foo"})
|
||||
self.fail("should have raised ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
|
||||
def CheckClose(self):
|
||||
self.cu.close()
|
||||
|
@ -360,8 +335,7 @@ class CursorTests(unittest.TestCase):
|
|||
def CheckTotalChanges(self):
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
if self.cx.total_changes < 2:
|
||||
self.fail("total changes reported wrong value")
|
||||
self.assertLess(2, self.cx.total_changes, msg='total changes reported wrong value')
|
||||
|
||||
# Checks for executemany:
|
||||
# Sequences are required by the DB-API, iterators
|
||||
|
@ -392,32 +366,16 @@ class CursorTests(unittest.TestCase):
|
|||
self.cu.executemany("insert into test(income) values (?)", mygen())
|
||||
|
||||
def CheckExecuteManyWrongSqlArg(self):
|
||||
try:
|
||||
with self.assertRaises(ValueError):
|
||||
self.cu.executemany(42, [(3,)])
|
||||
self.fail("should have raised a ValueError")
|
||||
except ValueError:
|
||||
return
|
||||
except:
|
||||
self.fail("raised wrong exception.")
|
||||
|
||||
def CheckExecuteManySelect(self):
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
self.cu.executemany("select ?", [(3,)])
|
||||
self.fail("should have raised a ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
return
|
||||
except:
|
||||
self.fail("raised wrong exception.")
|
||||
|
||||
def CheckExecuteManyNotIterable(self):
|
||||
try:
|
||||
with self.assertRaises(TypeError):
|
||||
self.cu.executemany("insert into test(income) values (?)", 42)
|
||||
self.fail("should have raised a TypeError")
|
||||
except TypeError:
|
||||
return
|
||||
except Exception as e:
|
||||
print("raised", e.__class__)
|
||||
self.fail("raised wrong exception.")
|
||||
|
||||
def CheckFetchIter(self):
|
||||
# Optional DB-API extension.
|
||||
|
@ -494,22 +452,15 @@ class CursorTests(unittest.TestCase):
|
|||
self.assertEqual(self.cu.connection, self.cx)
|
||||
|
||||
def CheckWrongCursorCallable(self):
|
||||
try:
|
||||
with self.assertRaises(TypeError):
|
||||
def f(): pass
|
||||
cur = self.cx.cursor(f)
|
||||
self.fail("should have raised a TypeError")
|
||||
except TypeError:
|
||||
return
|
||||
self.fail("should have raised a ValueError")
|
||||
|
||||
def CheckCursorWrongClass(self):
|
||||
class Foo: pass
|
||||
foo = Foo()
|
||||
try:
|
||||
with self.assertRaises(TypeError):
|
||||
cur = sqlite.Cursor(foo)
|
||||
self.fail("should have raised a ValueError")
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
@unittest.skipUnless(threading, 'This test requires threading.')
|
||||
class ThreadTests(unittest.TestCase):
|
||||
|
@ -708,22 +659,21 @@ class ExtensionTests(unittest.TestCase):
|
|||
def CheckScriptSyntaxError(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
raised = False
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError):
|
||||
cur.executescript("create table test(x); asdf; create table test2(x)")
|
||||
except sqlite.OperationalError:
|
||||
raised = True
|
||||
self.assertEqual(raised, True, "should have raised an exception")
|
||||
|
||||
def CheckScriptErrorNormal(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
raised = False
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError):
|
||||
cur.executescript("create table test(sadfsadfdsa); select foo from hurz;")
|
||||
except sqlite.OperationalError:
|
||||
raised = True
|
||||
self.assertEqual(raised, True, "should have raised an exception")
|
||||
|
||||
def CheckCursorExecutescriptAsBytes(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
with self.assertRaises(ValueError) as cm:
|
||||
cur.executescript(b"create table test(foo); insert into test(foo) values (5);")
|
||||
self.assertEqual(str(cm.exception), 'script argument must be unicode.')
|
||||
|
||||
def CheckConnectionExecute(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
|
@ -745,68 +695,37 @@ class ExtensionTests(unittest.TestCase):
|
|||
self.assertEqual(result, 5, "Basic test of Connection.executescript")
|
||||
|
||||
class ClosedConTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
pass
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
def CheckClosedConCursor(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
cur = con.cursor()
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
except:
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
|
||||
def CheckClosedConCommit(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con.commit()
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
except:
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
|
||||
def CheckClosedConRollback(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con.rollback()
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
except:
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
|
||||
def CheckClosedCurExecute(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
con.close()
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
cur.execute("select 4")
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
except:
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
|
||||
def CheckClosedCreateFunction(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
def f(x): return 17
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con.create_function("foo", 1, f)
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
except:
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
|
||||
def CheckClosedCreateAggregate(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
|
@ -818,57 +737,31 @@ class ClosedConTests(unittest.TestCase):
|
|||
pass
|
||||
def finalize(self):
|
||||
return 17
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con.create_aggregate("foo", 1, Agg)
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
except:
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
|
||||
def CheckClosedSetAuthorizer(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
def authorizer(*args):
|
||||
return sqlite.DENY
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con.set_authorizer(authorizer)
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
except:
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
|
||||
def CheckClosedSetProgressCallback(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
def progress(): pass
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con.set_progress_handler(progress, 100)
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
except:
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
|
||||
def CheckClosedCall(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con()
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
except:
|
||||
self.fail("Should have raised a ProgrammingError")
|
||||
|
||||
class ClosedCurTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
pass
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
def CheckClosed(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
|
@ -882,15 +775,9 @@ class ClosedCurTests(unittest.TestCase):
|
|||
else:
|
||||
params = []
|
||||
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
method = getattr(cur, method_name)
|
||||
|
||||
method(*params)
|
||||
self.fail("Should have raised a ProgrammingError: method " + method_name)
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
except:
|
||||
self.fail("Should have raised a ProgrammingError: " + method_name)
|
||||
|
||||
def suite():
|
||||
module_suite = unittest.makeSuite(ModuleTests, "Check")
|
||||
|
|
|
@ -25,27 +25,16 @@ import unittest
|
|||
import sqlite3 as sqlite
|
||||
|
||||
class CollationTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
pass
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
def CheckCreateCollationNotCallable(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
try:
|
||||
with self.assertRaises(TypeError) as cm:
|
||||
con.create_collation("X", 42)
|
||||
self.fail("should have raised a TypeError")
|
||||
except TypeError as e:
|
||||
self.assertEqual(e.args[0], "parameter must be callable")
|
||||
self.assertEqual(str(cm.exception), 'parameter must be callable')
|
||||
|
||||
def CheckCreateCollationNotAscii(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con.create_collation("collä", lambda x, y: (x > y) - (x < y))
|
||||
self.fail("should have raised a ProgrammingError")
|
||||
except sqlite.ProgrammingError as e:
|
||||
pass
|
||||
|
||||
@unittest.skipIf(sqlite.sqlite_version_info < (3, 2, 1),
|
||||
'old SQLite versions crash on this test')
|
||||
|
@ -66,15 +55,13 @@ class CollationTests(unittest.TestCase):
|
|||
) order by x collate mycoll
|
||||
"""
|
||||
result = con.execute(sql).fetchall()
|
||||
if result[0][0] != "c" or result[1][0] != "b" or result[2][0] != "a":
|
||||
self.fail("the expected order was not returned")
|
||||
self.assertEqual(result, [('c',), ('b',), ('a',)],
|
||||
msg='the expected order was not returned')
|
||||
|
||||
con.create_collation("mycoll", None)
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError) as cm:
|
||||
result = con.execute(sql).fetchall()
|
||||
self.fail("should have raised an OperationalError")
|
||||
except sqlite.OperationalError as e:
|
||||
self.assertEqual(e.args[0].lower(), "no such collation sequence: mycoll")
|
||||
self.assertEqual(str(cm.exception), 'no such collation sequence: mycoll')
|
||||
|
||||
def CheckCollationReturnsLargeInteger(self):
|
||||
def mycoll(x, y):
|
||||
|
@ -106,8 +93,8 @@ class CollationTests(unittest.TestCase):
|
|||
result = con.execute("""
|
||||
select x from (select 'a' as x union select 'b' as x) order by x collate mycoll
|
||||
""").fetchall()
|
||||
if result[0][0] != 'b' or result[1][0] != 'a':
|
||||
self.fail("wrong collation function is used")
|
||||
self.assertEqual(result[0][0], 'b')
|
||||
self.assertEqual(result[1][0], 'a')
|
||||
|
||||
def CheckDeregisterCollation(self):
|
||||
"""
|
||||
|
@ -117,12 +104,9 @@ class CollationTests(unittest.TestCase):
|
|||
con = sqlite.connect(":memory:")
|
||||
con.create_collation("mycoll", lambda x, y: (x > y) - (x < y))
|
||||
con.create_collation("mycoll", None)
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError) as cm:
|
||||
con.execute("select 'a' as x union select 'b' as x order by x collate mycoll")
|
||||
self.fail("should have raised an OperationalError")
|
||||
except sqlite.OperationalError as e:
|
||||
if not e.args[0].startswith("no such collation sequence"):
|
||||
self.fail("wrong OperationalError raised")
|
||||
self.assertEqual(str(cm.exception), 'no such collation sequence: mycoll')
|
||||
|
||||
class ProgressTests(unittest.TestCase):
|
||||
def CheckProgressHandlerUsed(self):
|
||||
|
|
|
@ -84,9 +84,8 @@ class RegressionTests(unittest.TestCase):
|
|||
cur.execute("select 1 x union select " + str(i))
|
||||
con.close()
|
||||
|
||||
@unittest.skipIf(sqlite.sqlite_version_info < (3, 2, 2), 'needs sqlite 3.2.2 or newer')
|
||||
def CheckOnConflictRollback(self):
|
||||
if sqlite.sqlite_version_info < (3, 2, 2):
|
||||
return
|
||||
con = sqlite.connect(":memory:")
|
||||
con.execute("create table foo(x, unique(x) on conflict rollback)")
|
||||
con.execute("insert into foo(x) values (1)")
|
||||
|
@ -134,17 +133,11 @@ class RegressionTests(unittest.TestCase):
|
|||
def CheckErrorMsgDecodeError(self):
|
||||
# When porting the module to Python 3.0, the error message about
|
||||
# decoding errors disappeared. This verifies they're back again.
|
||||
failure = None
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError) as cm:
|
||||
self.con.execute("select 'xxx' || ? || 'yyy' colname",
|
||||
(bytes(bytearray([250])),)).fetchone()
|
||||
failure = "should have raised an OperationalError with detailed description"
|
||||
except sqlite.OperationalError as e:
|
||||
msg = e.args[0]
|
||||
if not msg.startswith("Could not decode to UTF-8 column 'colname' with text 'xxx"):
|
||||
failure = "OperationalError did not have expected description text"
|
||||
if failure:
|
||||
self.fail(failure)
|
||||
msg = "Could not decode to UTF-8 column 'colname' with text 'xxx"
|
||||
self.assertIn(msg, str(cm.exception))
|
||||
|
||||
def CheckRegisterAdapter(self):
|
||||
"""
|
||||
|
@ -170,14 +163,8 @@ class RegressionTests(unittest.TestCase):
|
|||
|
||||
con = sqlite.connect(":memory:")
|
||||
cur = Cursor(con)
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
cur.execute("select 4+5").fetchall()
|
||||
self.fail("should have raised ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
except:
|
||||
self.fail("should have raised ProgrammingError")
|
||||
|
||||
|
||||
def CheckStrSubclass(self):
|
||||
"""
|
||||
|
@ -196,13 +183,8 @@ class RegressionTests(unittest.TestCase):
|
|||
pass
|
||||
|
||||
con = Connection(":memory:")
|
||||
try:
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
cur = con.cursor()
|
||||
self.fail("should have raised ProgrammingError")
|
||||
except sqlite.ProgrammingError:
|
||||
pass
|
||||
except:
|
||||
self.fail("should have raised ProgrammingError")
|
||||
|
||||
def CheckCursorRegistration(self):
|
||||
"""
|
||||
|
@ -223,13 +205,8 @@ class RegressionTests(unittest.TestCase):
|
|||
cur.executemany("insert into foo(x) values (?)", [(3,), (4,), (5,)])
|
||||
cur.execute("select x from foo")
|
||||
con.rollback()
|
||||
try:
|
||||
with self.assertRaises(sqlite.InterfaceError):
|
||||
cur.fetchall()
|
||||
self.fail("should have raised InterfaceError")
|
||||
except sqlite.InterfaceError:
|
||||
pass
|
||||
except:
|
||||
self.fail("should have raised InterfaceError")
|
||||
|
||||
def CheckAutoCommit(self):
|
||||
"""
|
||||
|
|
|
@ -111,39 +111,25 @@ class TransactionTests(unittest.TestCase):
|
|||
res = self.cur2.fetchall()
|
||||
self.assertEqual(len(res), 1)
|
||||
|
||||
@unittest.skipIf(sqlite.sqlite_version_info < (3, 2, 2),
|
||||
'test hangs on sqlite versions older than 3.2.2')
|
||||
def CheckRaiseTimeout(self):
|
||||
if sqlite.sqlite_version_info < (3, 2, 2):
|
||||
# This will fail (hang) on earlier versions of sqlite.
|
||||
# Determine exact version it was fixed. 3.2.1 hangs.
|
||||
return
|
||||
self.cur1.execute("create table test(i)")
|
||||
self.cur1.execute("insert into test(i) values (5)")
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError):
|
||||
self.cur2.execute("insert into test(i) values (5)")
|
||||
self.fail("should have raised an OperationalError")
|
||||
except sqlite.OperationalError:
|
||||
pass
|
||||
except:
|
||||
self.fail("should have raised an OperationalError")
|
||||
|
||||
@unittest.skipIf(sqlite.sqlite_version_info < (3, 2, 2),
|
||||
'test hangs on sqlite versions older than 3.2.2')
|
||||
def CheckLocking(self):
|
||||
"""
|
||||
This tests the improved concurrency with pysqlite 2.3.4. You needed
|
||||
to roll back con2 before you could commit con1.
|
||||
"""
|
||||
if sqlite.sqlite_version_info < (3, 2, 2):
|
||||
# This will fail (hang) on earlier versions of sqlite.
|
||||
# Determine exact version it was fixed. 3.2.1 hangs.
|
||||
return
|
||||
self.cur1.execute("create table test(i)")
|
||||
self.cur1.execute("insert into test(i) values (5)")
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError):
|
||||
self.cur2.execute("insert into test(i) values (5)")
|
||||
self.fail("should have raised an OperationalError")
|
||||
except sqlite.OperationalError:
|
||||
pass
|
||||
except:
|
||||
self.fail("should have raised an OperationalError")
|
||||
# NO self.con2.rollback() HERE!!!
|
||||
self.con1.commit()
|
||||
|
||||
|
@ -159,13 +145,8 @@ class TransactionTests(unittest.TestCase):
|
|||
cur.execute("select 1 union select 2 union select 3")
|
||||
|
||||
con.rollback()
|
||||
try:
|
||||
with self.assertRaises(sqlite.InterfaceError):
|
||||
cur.fetchall()
|
||||
self.fail("InterfaceError should have been raised")
|
||||
except sqlite.InterfaceError as e:
|
||||
pass
|
||||
except:
|
||||
self.fail("InterfaceError should have been raised")
|
||||
|
||||
class SpecialCommandTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
|
|
|
@ -185,24 +185,14 @@ class DeclTypesTests(unittest.TestCase):
|
|||
def CheckUnsupportedSeq(self):
|
||||
class Bar: pass
|
||||
val = Bar()
|
||||
try:
|
||||
with self.assertRaises(sqlite.InterfaceError):
|
||||
self.cur.execute("insert into test(f) values (?)", (val,))
|
||||
self.fail("should have raised an InterfaceError")
|
||||
except sqlite.InterfaceError:
|
||||
pass
|
||||
except:
|
||||
self.fail("should have raised an InterfaceError")
|
||||
|
||||
def CheckUnsupportedDict(self):
|
||||
class Bar: pass
|
||||
val = Bar()
|
||||
try:
|
||||
with self.assertRaises(sqlite.InterfaceError):
|
||||
self.cur.execute("insert into test(f) values (:val)", {"val": val})
|
||||
self.fail("should have raised an InterfaceError")
|
||||
except sqlite.InterfaceError:
|
||||
pass
|
||||
except:
|
||||
self.fail("should have raised an InterfaceError")
|
||||
|
||||
def CheckBlob(self):
|
||||
# default
|
||||
|
@ -350,11 +340,9 @@ class DateTimeTests(unittest.TestCase):
|
|||
ts2 = self.cur.fetchone()[0]
|
||||
self.assertEqual(ts, ts2)
|
||||
|
||||
@unittest.skipIf(sqlite.sqlite_version_info < (3, 1),
|
||||
'the date functions are available on 3.1 or later')
|
||||
def CheckSqlTimestamp(self):
|
||||
# The date functions are only available in SQLite version 3.1 or later
|
||||
if sqlite.sqlite_version_info < (3, 1):
|
||||
return
|
||||
|
||||
# SQLite's current_timestamp uses UTC time, while datetime.datetime.now() uses local time.
|
||||
now = datetime.datetime.now()
|
||||
self.cur.execute("insert into test(ts) values (current_timestamp)")
|
||||
|
|
|
@ -162,11 +162,8 @@ class FunctionTests(unittest.TestCase):
|
|||
self.con.close()
|
||||
|
||||
def CheckFuncErrorOnCreate(self):
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError):
|
||||
self.con.create_function("bla", -100, lambda x: 2*x)
|
||||
self.fail("should have raised an OperationalError")
|
||||
except sqlite.OperationalError:
|
||||
pass
|
||||
|
||||
def CheckFuncRefCount(self):
|
||||
def getfunc():
|
||||
|
@ -231,12 +228,10 @@ class FunctionTests(unittest.TestCase):
|
|||
|
||||
def CheckFuncException(self):
|
||||
cur = self.con.cursor()
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError) as cm:
|
||||
cur.execute("select raiseexception()")
|
||||
cur.fetchone()
|
||||
self.fail("should have raised OperationalError")
|
||||
except sqlite.OperationalError as e:
|
||||
self.assertEqual(e.args[0], 'user-defined function raised exception')
|
||||
self.assertEqual(str(cm.exception), 'user-defined function raised exception')
|
||||
|
||||
def CheckParamString(self):
|
||||
cur = self.con.cursor()
|
||||
|
@ -312,55 +307,42 @@ class AggregateTests(unittest.TestCase):
|
|||
pass
|
||||
|
||||
def CheckAggrErrorOnCreate(self):
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError):
|
||||
self.con.create_function("bla", -100, AggrSum)
|
||||
self.fail("should have raised an OperationalError")
|
||||
except sqlite.OperationalError:
|
||||
pass
|
||||
|
||||
def CheckAggrNoStep(self):
|
||||
cur = self.con.cursor()
|
||||
try:
|
||||
with self.assertRaises(AttributeError) as cm:
|
||||
cur.execute("select nostep(t) from test")
|
||||
self.fail("should have raised an AttributeError")
|
||||
except AttributeError as e:
|
||||
self.assertEqual(e.args[0], "'AggrNoStep' object has no attribute 'step'")
|
||||
self.assertEqual(str(cm.exception), "'AggrNoStep' object has no attribute 'step'")
|
||||
|
||||
def CheckAggrNoFinalize(self):
|
||||
cur = self.con.cursor()
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError) as cm:
|
||||
cur.execute("select nofinalize(t) from test")
|
||||
val = cur.fetchone()[0]
|
||||
self.fail("should have raised an OperationalError")
|
||||
except sqlite.OperationalError as e:
|
||||
self.assertEqual(e.args[0], "user-defined aggregate's 'finalize' method raised error")
|
||||
self.assertEqual(str(cm.exception), "user-defined aggregate's 'finalize' method raised error")
|
||||
|
||||
def CheckAggrExceptionInInit(self):
|
||||
cur = self.con.cursor()
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError) as cm:
|
||||
cur.execute("select excInit(t) from test")
|
||||
val = cur.fetchone()[0]
|
||||
self.fail("should have raised an OperationalError")
|
||||
except sqlite.OperationalError as e:
|
||||
self.assertEqual(e.args[0], "user-defined aggregate's '__init__' method raised error")
|
||||
self.assertEqual(str(cm.exception), "user-defined aggregate's '__init__' method raised error")
|
||||
|
||||
def CheckAggrExceptionInStep(self):
|
||||
cur = self.con.cursor()
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError) as cm:
|
||||
cur.execute("select excStep(t) from test")
|
||||
val = cur.fetchone()[0]
|
||||
self.fail("should have raised an OperationalError")
|
||||
except sqlite.OperationalError as e:
|
||||
self.assertEqual(e.args[0], "user-defined aggregate's 'step' method raised error")
|
||||
self.assertEqual(str(cm.exception), "user-defined aggregate's 'step' method raised error")
|
||||
|
||||
def CheckAggrExceptionInFinalize(self):
|
||||
cur = self.con.cursor()
|
||||
try:
|
||||
with self.assertRaises(sqlite.OperationalError) as cm:
|
||||
cur.execute("select excFinalize(t) from test")
|
||||
val = cur.fetchone()[0]
|
||||
self.fail("should have raised an OperationalError")
|
||||
except sqlite.OperationalError as e:
|
||||
self.assertEqual(e.args[0], "user-defined aggregate's 'finalize' method raised error")
|
||||
self.assertEqual(str(cm.exception), "user-defined aggregate's 'finalize' method raised error")
|
||||
|
||||
def CheckAggrCheckParamStr(self):
|
||||
cur = self.con.cursor()
|
||||
|
@ -433,22 +415,14 @@ class AuthorizerTests(unittest.TestCase):
|
|||
pass
|
||||
|
||||
def test_table_access(self):
|
||||
try:
|
||||
with self.assertRaises(sqlite.DatabaseError) as cm:
|
||||
self.con.execute("select * from t2")
|
||||
except sqlite.DatabaseError as e:
|
||||
if not e.args[0].endswith("prohibited"):
|
||||
self.fail("wrong exception text: %s" % e.args[0])
|
||||
return
|
||||
self.fail("should have raised an exception due to missing privileges")
|
||||
self.assertIn('prohibited', str(cm.exception))
|
||||
|
||||
def test_column_access(self):
|
||||
try:
|
||||
with self.assertRaises(sqlite.DatabaseError) as cm:
|
||||
self.con.execute("select c2 from t1")
|
||||
except sqlite.DatabaseError as e:
|
||||
if not e.args[0].endswith("prohibited"):
|
||||
self.fail("wrong exception text: %s" % e.args[0])
|
||||
return
|
||||
self.fail("should have raised an exception due to missing privileges")
|
||||
self.assertIn('prohibited', str(cm.exception))
|
||||
|
||||
class AuthorizerRaiseExceptionTests(AuthorizerTests):
|
||||
@staticmethod
|
||||
|
|
|
@ -1349,7 +1349,8 @@ def transient_internet(resource_name, *, timeout=30.0, errnos=()):
                  500 <= err.code <= 599) or
                (isinstance(err, urllib.error.URLError) and
                    (("ConnectionRefusedError" in err.reason) or
                     ("TimeoutError" in err.reason))) or
                     ("TimeoutError" in err.reason) or
                     ("EOFError" in err.reason))) or
                n in captured_errnos):
            if not verbose:
                sys.stderr.write(denied.args[0] + "\n")
@ -73,6 +73,10 @@ def run_python_until_end(*args, **env_vars):
        # Need to preserve the original environment, for in-place testing of
        # shared library builds.
        env = os.environ.copy()
        # set TERM='' unless the TERM environment variable is passed explicitly
        # see issues #11390 and #18300
        if 'TERM' not in env_vars:
            env['TERM'] = ''
        # But a special flag that can be set to override -- in this case, the
        # caller is responsible to pass the full environment.
        if env_vars.pop('__cleanenv', None):
@ -1185,14 +1185,14 @@ class BaseEventLoopWithSelectorTests(test_utils.TestCase):
|
|||
test_utils.run_briefly(self.loop) # allow transport to close
|
||||
|
||||
sock.family = socket.AF_INET6
|
||||
coro = self.loop.create_connection(asyncio.Protocol, '::2', 80)
|
||||
coro = self.loop.create_connection(asyncio.Protocol, '::1', 80)
|
||||
t, p = self.loop.run_until_complete(coro)
|
||||
try:
|
||||
# Without inet_pton we use getaddrinfo, which transforms ('::2', 80)
|
||||
# to ('::0.0.0.2', 80, 0, 0). The last 0s are flow info, scope id.
|
||||
# Without inet_pton we use getaddrinfo, which transforms ('::1', 80)
|
||||
# to ('::1', 80, 0, 0). The last 0s are flow info, scope id.
|
||||
[address] = sock.connect.call_args[0]
|
||||
host, port = address[:2]
|
||||
self.assertRegex(host, r'::(0\.)*2')
|
||||
self.assertRegex(host, r'::(0\.)*1')
|
||||
self.assertEqual(port, 80)
|
||||
_, kwargs = m_socket.socket.call_args
|
||||
self.assertEqual(kwargs['family'], m_socket.AF_INET6)
|
||||
|
|
|
@ -472,6 +472,13 @@ if 1:
|
|||
d = {f(): f(), f(): f()}
|
||||
self.assertEqual(d, {1: 2, 3: 4})
|
||||
|
||||
def test_compile_filename(self):
|
||||
for filename in ('file.py', b'file.py',
|
||||
bytearray(b'file.py'), memoryview(b'file.py')):
|
||||
code = compile('pass', filename, 'exec')
|
||||
self.assertEqual(code.co_filename, 'file.py')
|
||||
self.assertRaises(TypeError, compile, 'pass', list(b'file.py'), 'exec')
|
||||
|
||||
@support.cpython_only
|
||||
def test_same_filename_used(self):
|
||||
s = """def f(): pass\ndef g(): pass"""
|
||||
|
|
|
@ -762,6 +762,40 @@ class TestExitStack(unittest.TestCase):
|
|||
stack.push(cm)
|
||||
self.assertIs(stack._exit_callbacks[-1], cm)
|
||||
|
||||
def test_dont_reraise_RuntimeError(self):
|
||||
# https://bugs.python.org/issue27122
|
||||
class UniqueException(Exception): pass
|
||||
class UniqueRuntimeError(RuntimeError): pass
|
||||
|
||||
@contextmanager
|
||||
def second():
|
||||
try:
|
||||
yield 1
|
||||
except Exception as exc:
|
||||
raise UniqueException("new exception") from exc
|
||||
|
||||
@contextmanager
|
||||
def first():
|
||||
try:
|
||||
yield 1
|
||||
except Exception as exc:
|
||||
raise exc
|
||||
|
||||
# The UniqueRuntimeError should be caught by second()'s exception
|
||||
# handler which chain raised a new UniqueException.
|
||||
with self.assertRaises(UniqueException) as err_ctx:
|
||||
with ExitStack() as es_ctx:
|
||||
es_ctx.enter_context(second())
|
||||
es_ctx.enter_context(first())
|
||||
raise UniqueRuntimeError("please no infinite loop.")
|
||||
|
||||
exc = err_ctx.exception
|
||||
self.assertIsInstance(exc, UniqueException)
|
||||
self.assertIsInstance(exc.__context__, UniqueRuntimeError)
|
||||
self.assertIsNone(exc.__context__.__context__)
|
||||
self.assertIsNone(exc.__context__.__cause__)
|
||||
self.assertIs(exc.__cause__, exc.__context__)
|
||||
|
||||
|
||||
class TestRedirectStream:
|
||||
|
||||
|
|
|
@ -1423,7 +1423,7 @@ class CoroutineTest(unittest.TestCase):

        with warnings.catch_warnings():
            warnings.simplefilter("error")
            # Test that __aiter__ that returns an asyncronous iterator
            # Test that __aiter__ that returns an asynchronous iterator
            # directly does not throw any warnings.
            run_async(main())
        self.assertEqual(I, 111011)
@ -10,6 +10,7 @@
|
|||
#
|
||||
|
||||
import os
|
||||
import string
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
|
@ -399,6 +400,55 @@ class MiscTests(unittest.TestCase):
|
|||
|
||||
class TestAscii(unittest.TestCase):
|
||||
|
||||
def test_controlnames(self):
|
||||
for name in curses.ascii.controlnames:
|
||||
self.assertTrue(hasattr(curses.ascii, name), name)
|
||||
|
||||
def test_ctypes(self):
|
||||
def check(func, expected):
|
||||
with self.subTest(ch=c, func=func):
|
||||
self.assertEqual(func(i), expected)
|
||||
self.assertEqual(func(c), expected)
|
||||
|
||||
for i in range(256):
|
||||
c = chr(i)
|
||||
b = bytes([i])
|
||||
check(curses.ascii.isalnum, b.isalnum())
|
||||
check(curses.ascii.isalpha, b.isalpha())
|
||||
check(curses.ascii.isdigit, b.isdigit())
|
||||
check(curses.ascii.islower, b.islower())
|
||||
check(curses.ascii.isspace, b.isspace())
|
||||
check(curses.ascii.isupper, b.isupper())
|
||||
|
||||
check(curses.ascii.isascii, i < 128)
|
||||
check(curses.ascii.ismeta, i >= 128)
|
||||
check(curses.ascii.isctrl, i < 32)
|
||||
check(curses.ascii.iscntrl, i < 32 or i == 127)
|
||||
check(curses.ascii.isblank, c in ' \t')
|
||||
check(curses.ascii.isgraph, 32 < i <= 126)
|
||||
check(curses.ascii.isprint, 32 <= i <= 126)
|
||||
check(curses.ascii.ispunct, c in string.punctuation)
|
||||
check(curses.ascii.isxdigit, c in string.hexdigits)
|
||||
|
||||
def test_ascii(self):
|
||||
ascii = curses.ascii.ascii
|
||||
self.assertEqual(ascii('\xc1'), 'A')
|
||||
self.assertEqual(ascii('A'), 'A')
|
||||
self.assertEqual(ascii(ord('\xc1')), ord('A'))
|
||||
|
||||
def test_ctrl(self):
|
||||
ctrl = curses.ascii.ctrl
|
||||
self.assertEqual(ctrl('J'), '\n')
|
||||
self.assertEqual(ctrl('\n'), '\n')
|
||||
self.assertEqual(ctrl('@'), '\0')
|
||||
self.assertEqual(ctrl(ord('J')), ord('\n'))
|
||||
|
||||
def test_alt(self):
|
||||
alt = curses.ascii.alt
|
||||
self.assertEqual(alt('\n'), '\x8a')
|
||||
self.assertEqual(alt('A'), '\xc1')
|
||||
self.assertEqual(alt(ord('A')), 0xc1)
|
||||
|
||||
def test_unctrl(self):
|
||||
unctrl = curses.ascii.unctrl
|
||||
self.assertEqual(unctrl('a'), 'a')
|
||||
|
@ -408,9 +458,13 @@ class TestAscii(unittest.TestCase):
|
|||
self.assertEqual(unctrl('\x7f'), '^?')
|
||||
self.assertEqual(unctrl('\n'), '^J')
|
||||
self.assertEqual(unctrl('\0'), '^@')
|
||||
self.assertEqual(unctrl(ord('A')), 'A')
|
||||
self.assertEqual(unctrl(ord('\n')), '^J')
|
||||
# Meta-bit characters
|
||||
self.assertEqual(unctrl('\x8a'), '!^J')
|
||||
self.assertEqual(unctrl('\xc1'), '!A')
|
||||
self.assertEqual(unctrl(ord('\x8a')), '!^J')
|
||||
self.assertEqual(unctrl(ord('\xc1')), '!A')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -2491,7 +2491,8 @@ class PythonAPItests(unittest.TestCase):
|
|||
Decimal = self.decimal.Decimal
|
||||
|
||||
class MyDecimal(Decimal):
|
||||
pass
|
||||
def __init__(self, _):
|
||||
self.x = 'y'
|
||||
|
||||
self.assertTrue(issubclass(MyDecimal, Decimal))
|
||||
|
||||
|
@ -2499,6 +2500,8 @@ class PythonAPItests(unittest.TestCase):
|
|||
self.assertEqual(type(r), MyDecimal)
|
||||
self.assertEqual(str(r),
|
||||
'0.1000000000000000055511151231257827021181583404541015625')
|
||||
self.assertEqual(r.x, 'y')
|
||||
|
||||
bigint = 12345678901234567890123456789
|
||||
self.assertEqual(MyDecimal.from_float(bigint), MyDecimal(bigint))
|
||||
self.assertTrue(MyDecimal.from_float(float('nan')).is_qnan())
|
||||
|
|
|
@ -2719,12 +2719,6 @@ output into something we can doctest against:
|
|||
>>> def normalize(s):
|
||||
... return '\n'.join(s.decode().splitlines())
|
||||
|
||||
Note: we also pass TERM='' to all the assert_python calls to avoid a bug
|
||||
in the readline library that is triggered in these tests because we are
|
||||
running them in a new python process. See:
|
||||
|
||||
http://lists.gnu.org/archive/html/bug-readline/2013-06/msg00000.html
|
||||
|
||||
With those preliminaries out of the way, we'll start with a file with two
|
||||
simple tests and no errors. We'll run both the unadorned doctest command, and
|
||||
the verbose version, and then check the output:
|
||||
|
@ -2741,9 +2735,9 @@ the verbose version, and then check the output:
|
|||
... _ = f.write('\n')
|
||||
... _ = f.write('And that is it.\n')
|
||||
... rc1, out1, err1 = script_helper.assert_python_ok(
|
||||
... '-m', 'doctest', fn, TERM='')
|
||||
... '-m', 'doctest', fn)
|
||||
... rc2, out2, err2 = script_helper.assert_python_ok(
|
||||
... '-m', 'doctest', '-v', fn, TERM='')
|
||||
... '-m', 'doctest', '-v', fn)
|
||||
|
||||
With no arguments and passing tests, we should get no output:
|
||||
|
||||
|
@ -2806,17 +2800,17 @@ text files).
|
|||
... _ = f.write(' \"\"\"\n')
|
||||
... import shutil
|
||||
... rc1, out1, err1 = script_helper.assert_python_failure(
|
||||
... '-m', 'doctest', fn, fn2, TERM='')
|
||||
... '-m', 'doctest', fn, fn2)
|
||||
... rc2, out2, err2 = script_helper.assert_python_ok(
|
||||
... '-m', 'doctest', '-o', 'ELLIPSIS', fn, TERM='')
|
||||
... '-m', 'doctest', '-o', 'ELLIPSIS', fn)
|
||||
... rc3, out3, err3 = script_helper.assert_python_ok(
|
||||
... '-m', 'doctest', '-o', 'ELLIPSIS',
|
||||
... '-o', 'NORMALIZE_WHITESPACE', fn, fn2, TERM='')
|
||||
... '-o', 'NORMALIZE_WHITESPACE', fn, fn2)
|
||||
... rc4, out4, err4 = script_helper.assert_python_failure(
|
||||
... '-m', 'doctest', '-f', fn, fn2, TERM='')
|
||||
... '-m', 'doctest', '-f', fn, fn2)
|
||||
... rc5, out5, err5 = script_helper.assert_python_ok(
|
||||
... '-m', 'doctest', '-v', '-o', 'ELLIPSIS',
|
||||
... '-o', 'NORMALIZE_WHITESPACE', fn, fn2, TERM='')
|
||||
... '-o', 'NORMALIZE_WHITESPACE', fn, fn2)
|
||||
|
||||
Our first test run will show the errors from the first file (doctest stops if a
|
||||
file has errors). Note that doctest test-run error output appears on stdout,
|
||||
|
@ -2922,7 +2916,7 @@ We should also check some typical error cases.
|
|||
Invalid file name:
|
||||
|
||||
>>> rc, out, err = script_helper.assert_python_failure(
|
||||
... '-m', 'doctest', 'nosuchfile', TERM='')
|
||||
... '-m', 'doctest', 'nosuchfile')
|
||||
>>> rc, out
|
||||
(1, b'')
|
||||
>>> print(normalize(err)) # doctest: +ELLIPSIS
|
||||
|
@ -2933,7 +2927,7 @@ Invalid file name:
|
|||
Invalid doctest option:
|
||||
|
||||
>>> rc, out, err = script_helper.assert_python_failure(
|
||||
... '-m', 'doctest', '-o', 'nosuchoption', TERM='')
|
||||
... '-m', 'doctest', '-o', 'nosuchoption')
|
||||
>>> rc, out
|
||||
(2, b'')
|
||||
>>> print(normalize(err)) # doctest: +ELLIPSIS
|
||||
|
|
|
@ -57,6 +57,10 @@ Here we add keyword arguments
|
|||
Traceback (most recent call last):
|
||||
...
|
||||
TypeError: f() got multiple values for keyword argument 'a'
|
||||
>>> f(1, 2, a=3, **{'a': 4}, **{'a': 5})
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
TypeError: f() got multiple values for keyword argument 'a'
|
||||
>>> f(1, 2, 3, *[4, 5], **{'a':6, 'b':7})
|
||||
(1, 2, 3, 4, 5) {'a': 6, 'b': 7}
|
||||
>>> f(1, 2, 3, x=4, y=5, *(6, 7), **{'a':8, 'b': 9})
|
||||
|
|
|
@ -217,6 +217,33 @@ class TestPartialC(TestPartial, unittest.TestCase):
|
|||
['{}({!r}, {}, {})'.format(name, capture, args_repr, kwargs_repr)
|
||||
for kwargs_repr in kwargs_reprs])
|
||||
|
||||
def test_recursive_repr(self):
|
||||
if self.partial is c_functools.partial:
|
||||
name = 'functools.partial'
|
||||
else:
|
||||
name = self.partial.__name__
|
||||
|
||||
f = self.partial(capture)
|
||||
f.__setstate__((f, (), {}, {}))
|
||||
try:
|
||||
self.assertEqual(repr(f), '%s(%s(...))' % (name, name))
|
||||
finally:
|
||||
f.__setstate__((capture, (), {}, {}))
|
||||
|
||||
f = self.partial(capture)
|
||||
f.__setstate__((capture, (f,), {}, {}))
|
||||
try:
|
||||
self.assertEqual(repr(f), '%s(%r, %s(...))' % (name, capture, name))
|
||||
finally:
|
||||
f.__setstate__((capture, (), {}, {}))
|
||||
|
||||
f = self.partial(capture)
|
||||
f.__setstate__((capture, (), {'a': f}, {}))
|
||||
try:
|
||||
self.assertEqual(repr(f), '%s(%r, a=%s(...))' % (name, capture, name))
|
||||
finally:
|
||||
f.__setstate__((capture, (), {}, {}))
|
||||
|
||||
def test_pickle(self):
|
||||
f = self.partial(signature, ['asdf'], bar=[True])
|
||||
f.attr = []
|
||||
|
@ -297,6 +324,40 @@ class TestPartialC(TestPartial, unittest.TestCase):
|
|||
self.assertEqual(r, ((1, 2), {}))
|
||||
self.assertIs(type(r[0]), tuple)
|
||||
|
||||
def test_recursive_pickle(self):
|
||||
f = self.partial(capture)
|
||||
f.__setstate__((f, (), {}, {}))
|
||||
try:
|
||||
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
|
||||
with self.assertRaises(RecursionError):
|
||||
pickle.dumps(f, proto)
|
||||
finally:
|
||||
f.__setstate__((capture, (), {}, {}))
|
||||
|
||||
f = self.partial(capture)
|
||||
f.__setstate__((capture, (f,), {}, {}))
|
||||
try:
|
||||
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
|
||||
f_copy = pickle.loads(pickle.dumps(f, proto))
|
||||
try:
|
||||
self.assertIs(f_copy.args[0], f_copy)
|
||||
finally:
|
||||
f_copy.__setstate__((capture, (), {}, {}))
|
||||
finally:
|
||||
f.__setstate__((capture, (), {}, {}))
|
||||
|
||||
f = self.partial(capture)
|
||||
f.__setstate__((capture, (), {'a': f}, {}))
|
||||
try:
|
||||
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
|
||||
f_copy = pickle.loads(pickle.dumps(f, proto))
|
||||
try:
|
||||
self.assertIs(f_copy.keywords['a'], f_copy)
|
||||
finally:
|
||||
f_copy.__setstate__((capture, (), {}, {}))
|
||||
finally:
|
||||
f.__setstate__((capture, (), {}, {}))
|
||||
|
||||
# Issue 6083: Reference counting bug
|
||||
def test_setstate_refcount(self):
|
||||
class BadSequence:
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
import importlib
|
||||
from importlib import abc
|
||||
from importlib import util
|
||||
import sys
|
||||
import types
|
||||
import unittest
|
||||
|
||||
from . import util as test_util
|
||||
|
@ -122,12 +124,20 @@ class LazyLoaderTests(unittest.TestCase):
|
|||
self.assertFalse(hasattr(module, '__name__'))
|
||||
|
||||
def test_module_substitution_error(self):
|
||||
source_code = 'import sys; sys.modules[__name__] = 42'
|
||||
module = self.new_module(source_code)
|
||||
with test_util.uncache(TestingImporter.module_name):
|
||||
with self.assertRaises(ValueError):
|
||||
fresh_module = types.ModuleType(TestingImporter.module_name)
|
||||
sys.modules[TestingImporter.module_name] = fresh_module
|
||||
module = self.new_module()
|
||||
with self.assertRaisesRegex(ValueError, "substituted"):
|
||||
module.__name__
|
||||
|
||||
def test_module_already_in_sys(self):
|
||||
with test_util.uncache(TestingImporter.module_name):
|
||||
module = self.new_module()
|
||||
sys.modules[TestingImporter.module_name] = module
|
||||
# Force the load; just care that no exception is raised.
|
||||
module.__name__
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
|
|
@ -627,6 +627,22 @@ class CompileTestCase(unittest.TestCase):
|
|||
code2 = parser.compilest(st)
|
||||
self.assertEqual(eval(code2), -3)
|
||||
|
||||
def test_compile_filename(self):
|
||||
st = parser.expr('a + 5')
|
||||
code = parser.compilest(st)
|
||||
self.assertEqual(code.co_filename, '<syntax-tree>')
|
||||
code = st.compile()
|
||||
self.assertEqual(code.co_filename, '<syntax-tree>')
|
||||
for filename in ('file.py', b'file.py',
|
||||
bytearray(b'file.py'), memoryview(b'file.py')):
|
||||
code = parser.compilest(st, filename)
|
||||
self.assertEqual(code.co_filename, 'file.py')
|
||||
code = st.compile(filename)
|
||||
self.assertEqual(code.co_filename, 'file.py')
|
||||
self.assertRaises(TypeError, parser.compilest, st, list(b'file.py'))
|
||||
self.assertRaises(TypeError, st.compile, list(b'file.py'))
|
||||
|
||||
|
||||
class ParserStackLimitTestCase(unittest.TestCase):
|
||||
"""try to push the parser to/over its limits.
|
||||
see http://bugs.python.org/issue1881 for a discussion
|
||||
|
|
|
@ -356,7 +356,7 @@ def get_pydoc_html(module):
|
|||
def get_pydoc_link(module):
|
||||
"Returns a documentation web link of a module"
|
||||
dirname = os.path.dirname
|
||||
basedir = os.path.join(dirname(dirname(__file__)))
|
||||
basedir = dirname(dirname(__file__))
|
||||
doc = pydoc.TextDoc()
|
||||
loc = doc.getdocloc(module, basedir=basedir)
|
||||
return loc
|
||||
|
|
|
@ -1,15 +1,25 @@
|
|||
"""
|
||||
Very minimal unittests for parts of the readline module.
|
||||
"""
|
||||
from contextlib import ExitStack
|
||||
from errno import EIO
|
||||
import os
|
||||
import selectors
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
from test.support import import_module, unlink
|
||||
from test.support import import_module, unlink, TESTFN
|
||||
from test.support.script_helper import assert_python_ok
|
||||
|
||||
# Skip tests if there is no readline module
|
||||
readline = import_module('readline')
|
||||
|
||||
is_editline = readline.__doc__ and "libedit" in readline.__doc__
|
||||
|
||||
@unittest.skipUnless(hasattr(readline, "clear_history"),
|
||||
"The history update test cannot be run because the "
|
||||
"clear_history method is not available.")
|
||||
class TestHistoryManipulation (unittest.TestCase):
|
||||
"""
|
||||
These tests were added to check that the libedit emulation on OSX and the
|
||||
|
@ -17,9 +27,6 @@ class TestHistoryManipulation (unittest.TestCase):
|
|||
why the tests cover only a small subset of the interface.
|
||||
"""
|
||||
|
||||
@unittest.skipUnless(hasattr(readline, "clear_history"),
|
||||
"The history update test cannot be run because the "
|
||||
"clear_history method is not available.")
|
||||
def testHistoryUpdates(self):
|
||||
readline.clear_history()
|
||||
|
||||
|
@ -82,11 +89,29 @@ class TestHistoryManipulation (unittest.TestCase):
|
|||
# write_history_file can create the target
|
||||
readline.write_history_file(hfilename)
|
||||
|
||||
def test_nonascii_history(self):
|
||||
readline.clear_history()
|
||||
try:
|
||||
readline.add_history("entrée 1")
|
||||
except UnicodeEncodeError as err:
|
||||
self.skipTest("Locale cannot encode test data: " + format(err))
|
||||
readline.add_history("entrée 2")
|
||||
readline.replace_history_item(1, "entrée 22")
|
||||
readline.write_history_file(TESTFN)
|
||||
self.addCleanup(os.remove, TESTFN)
|
||||
readline.clear_history()
|
||||
readline.read_history_file(TESTFN)
|
||||
if is_editline:
|
||||
# An add_history() call seems to be required for get_history_
|
||||
# item() to register items from the file
|
||||
readline.add_history("dummy")
|
||||
self.assertEqual(readline.get_history_item(1), "entrée 1")
|
||||
self.assertEqual(readline.get_history_item(2), "entrée 22")
|
||||
|
||||
|
||||
class TestReadline(unittest.TestCase):
|
||||
|
||||
@unittest.skipIf(readline._READLINE_VERSION < 0x0600
|
||||
and "libedit" not in readline.__doc__,
|
||||
@unittest.skipIf(readline._READLINE_VERSION < 0x0600 and not is_editline,
|
||||
"not supported in this library version")
|
||||
def test_init(self):
|
||||
# Issue #19884: Ensure that the ANSI sequence "\033[1034h" is not
|
||||
|
@ -96,6 +121,130 @@ class TestReadline(unittest.TestCase):
|
|||
TERM='xterm-256color')
|
||||
self.assertEqual(stdout, b'')
|
||||
|
||||
def test_nonascii(self):
|
||||
try:
|
||||
readline.add_history("\xEB\xEF")
|
||||
except UnicodeEncodeError as err:
|
||||
self.skipTest("Locale cannot encode test data: " + format(err))
|
||||
|
||||
script = r"""import readline
|
||||
|
||||
is_editline = readline.__doc__ and "libedit" in readline.__doc__
|
||||
inserted = "[\xEFnserted]"
|
||||
macro = "|t\xEB[after]"
|
||||
set_pre_input_hook = getattr(readline, "set_pre_input_hook", None)
|
||||
if is_editline or not set_pre_input_hook:
|
||||
# The insert_line() call via pre_input_hook() does nothing with Editline,
|
||||
# so include the extra text that would have been inserted here
|
||||
macro = inserted + macro
|
||||
|
||||
if is_editline:
|
||||
readline.parse_and_bind(r'bind ^B ed-prev-char')
|
||||
readline.parse_and_bind(r'bind "\t" rl_complete')
|
||||
readline.parse_and_bind(r'bind -s ^A "{}"'.format(macro))
|
||||
else:
|
||||
readline.parse_and_bind(r'Control-b: backward-char')
|
||||
readline.parse_and_bind(r'"\t": complete')
|
||||
readline.parse_and_bind(r'set disable-completion off')
|
||||
readline.parse_and_bind(r'set show-all-if-ambiguous off')
|
||||
readline.parse_and_bind(r'set show-all-if-unmodified off')
|
||||
readline.parse_and_bind(r'Control-a: "{}"'.format(macro))
|
||||
|
||||
def pre_input_hook():
|
||||
readline.insert_text(inserted)
|
||||
readline.redisplay()
|
||||
if set_pre_input_hook:
|
||||
set_pre_input_hook(pre_input_hook)
|
||||
|
||||
def completer(text, state):
|
||||
if text == "t\xEB":
|
||||
if state == 0:
|
||||
print("text", ascii(text))
|
||||
print("line", ascii(readline.get_line_buffer()))
|
||||
print("indexes", readline.get_begidx(), readline.get_endidx())
|
||||
return "t\xEBnt"
|
||||
if state == 1:
|
||||
return "t\xEBxt"
|
||||
if text == "t\xEBx" and state == 0:
|
||||
return "t\xEBxt"
|
||||
return None
|
||||
readline.set_completer(completer)
|
||||
|
||||
def display(substitution, matches, longest_match_length):
|
||||
print("substitution", ascii(substitution))
|
||||
print("matches", ascii(matches))
|
||||
readline.set_completion_display_matches_hook(display)
|
||||
|
||||
print("result", ascii(input()))
|
||||
print("history", ascii(readline.get_history_item(1)))
|
||||
"""
|
||||
|
||||
input = b"\x01" # Ctrl-A, expands to "|t\xEB[after]"
|
||||
input += b"\x02" * len("[after]") # Move cursor back
|
||||
input += b"\t\t" # Display possible completions
|
||||
input += b"x\t" # Complete "t\xEBx" -> "t\xEBxt"
|
||||
input += b"\r"
|
||||
output = run_pty(script, input)
|
||||
self.assertIn(b"text 't\\xeb'\r\n", output)
|
||||
self.assertIn(b"line '[\\xefnserted]|t\\xeb[after]'\r\n", output)
|
||||
self.assertIn(b"indexes 11 13\r\n", output)
|
||||
if not is_editline and hasattr(readline, "set_pre_input_hook"):
|
||||
self.assertIn(b"substitution 't\\xeb'\r\n", output)
|
||||
self.assertIn(b"matches ['t\\xebnt', 't\\xebxt']\r\n", output)
|
||||
expected = br"'[\xefnserted]|t\xebxt[after]'"
|
||||
self.assertIn(b"result " + expected + b"\r\n", output)
|
||||
self.assertIn(b"history " + expected + b"\r\n", output)
|
||||
|
||||
|
||||
def run_pty(script, input=b"dummy input\r"):
|
||||
pty = import_module('pty')
|
||||
output = bytearray()
|
||||
[master, slave] = pty.openpty()
|
||||
args = (sys.executable, '-c', script)
|
||||
proc = subprocess.Popen(args, stdin=slave, stdout=slave, stderr=slave)
|
||||
os.close(slave)
|
||||
with ExitStack() as cleanup:
|
||||
cleanup.enter_context(proc)
|
||||
def terminate(proc):
|
||||
try:
|
||||
proc.terminate()
|
||||
except ProcessLookupError:
|
||||
# Workaround for Open/Net BSD bug (Issue 16762)
|
||||
pass
|
||||
cleanup.callback(terminate, proc)
|
||||
cleanup.callback(os.close, master)
|
||||
# Avoid using DefaultSelector and PollSelector. Kqueue() does not
|
||||
# work with pseudo-terminals on OS X < 10.9 (Issue 20365) and Open
|
||||
# BSD (Issue 20667). Poll() does not work with OS X 10.6 or 10.4
|
||||
# either (Issue 20472). Hopefully the file descriptor is low enough
|
||||
# to use with select().
|
||||
sel = cleanup.enter_context(selectors.SelectSelector())
|
||||
sel.register(master, selectors.EVENT_READ | selectors.EVENT_WRITE)
|
||||
os.set_blocking(master, False)
|
||||
while True:
|
||||
for [_, events] in sel.select():
|
||||
if events & selectors.EVENT_READ:
|
||||
try:
|
||||
chunk = os.read(master, 0x10000)
|
||||
except OSError as err:
|
||||
# Linux raises EIO when slave is closed (Issue 5380)
|
||||
if err.errno != EIO:
|
||||
raise
|
||||
chunk = b""
|
||||
if not chunk:
|
||||
return output
|
||||
output.extend(chunk)
|
||||
if events & selectors.EVENT_WRITE:
|
||||
try:
|
||||
input = input[os.write(master, input):]
|
||||
except OSError as err:
|
||||
# Apparently EIO means the slave was closed
|
||||
if err.errno != EIO:
|
||||
raise
|
||||
input = b"" # Stop writing
|
||||
if not input:
|
||||
sel.modify(master, selectors.EVENT_READ)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
|
|
@ -157,6 +157,12 @@ class SymtableTest(unittest.TestCase):
|
|||
self.fail("no SyntaxError for %r" % (brokencode,))
|
||||
checkfilename("def f(x): foo)(") # parse-time
|
||||
checkfilename("def f(x): global x") # symtable-build-time
|
||||
symtable.symtable("pass", b"spam", "exec")
|
||||
with self.assertRaises(TypeError):
|
||||
symtable.symtable("pass", bytearray(b"spam"), "exec")
|
||||
symtable.symtable("pass", memoryview(b"spam"), "exec")
|
||||
with self.assertRaises(TypeError):
|
||||
symtable.symtable("pass", list(b"spam"), "exec")
|
||||
|
||||
def test_eval(self):
|
||||
symbols = symtable.symtable("42", "?", "eval")
|
||||
|
|
|
@ -248,6 +248,11 @@ Overridden parameters
|
|||
...
|
||||
TypeError: f() got multiple values for keyword argument 'x'
|
||||
|
||||
>>> f(x=5, **{'x': 3}, **{'x': 2})
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
TypeError: f() got multiple values for keyword argument 'x'
|
||||
|
||||
>>> f(**{1: 3}, **{1: 5})
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
|
|
|
@ -18,7 +18,7 @@ import weakref
|
|||
|
||||
from itertools import product
|
||||
from test import support
|
||||
from test.support import TESTFN, findfile, import_fresh_module, gc_collect
|
||||
from test.support import TESTFN, findfile, import_fresh_module, gc_collect, swap_attr
|
||||
|
||||
# pyET is the pure-Python implementation.
|
||||
#
|
||||
|
@ -1860,6 +1860,12 @@ class BadElementTest(ElementTestCase, unittest.TestCase):
|
|||
e.extend([ET.Element('bar')])
|
||||
self.assertRaises(ValueError, e.remove, X('baz'))
|
||||
|
||||
def test_recursive_repr(self):
|
||||
# Issue #25455
|
||||
e = ET.Element('foo')
|
||||
with swap_attr(e, 'tag', e):
|
||||
with self.assertRaises(RuntimeError):
|
||||
repr(e) # Should not crash
|
||||
|
||||
class MutatingElementPath(str):
|
||||
def __new__(cls, elem, *args):
|
||||
|
|
|
@ -600,6 +600,19 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase):
|
|||
finally:
|
||||
os.remove(filename)
|
||||
|
||||
def testBytesPath(self):
|
||||
filename = support.TESTFN + ".zip"
|
||||
self.addCleanup(support.unlink, filename)
|
||||
with ZipFile(filename, "w") as z:
|
||||
zinfo = ZipInfo(TESTMOD + ".py", time.localtime(NOW))
|
||||
zinfo.compress_type = self.compression
|
||||
z.writestr(zinfo, test_src)
|
||||
|
||||
zipimport.zipimporter(filename)
|
||||
zipimport.zipimporter(os.fsencode(filename))
|
||||
zipimport.zipimporter(bytearray(os.fsencode(filename)))
|
||||
zipimport.zipimporter(memoryview(os.fsencode(filename)))
|
||||
|
||||
|
||||
@support.requires_zlib
|
||||
class CompressedZipImportTestCase(UncompressedZipImportTestCase):
|
||||
|
@ -620,6 +633,8 @@ class BadFileZipImportTestCase(unittest.TestCase):
|
|||
def testBadArgs(self):
|
||||
self.assertRaises(TypeError, zipimport.zipimporter, None)
|
||||
self.assertRaises(TypeError, zipimport.zipimporter, TESTMOD, kwd=None)
|
||||
self.assertRaises(TypeError, zipimport.zipimporter,
|
||||
list(os.fsencode(TESTMOD)))
|
||||
|
||||
def testFilenameTooLong(self):
|
||||
self.assertZipFailure('A' * 33000)
|
||||
|
|
|
@ -271,7 +271,7 @@ class Variable:
|
|||
|
||||
Return the name of the callback.
|
||||
"""
|
||||
f = CallWrapper(callback, None, self).__call__
|
||||
f = CallWrapper(callback, None, self._root).__call__
|
||||
cbname = repr(id(f))
|
||||
try:
|
||||
callback = callback.__func__
|
||||
|
@ -295,14 +295,19 @@ class Variable:
|
|||
CBNAME is the name of the callback returned from trace_variable or trace.
|
||||
"""
|
||||
self._tk.call("trace", "vdelete", self._name, mode, cbname)
|
||||
self._tk.deletecommand(cbname)
|
||||
try:
|
||||
self._tclCommands.remove(cbname)
|
||||
except ValueError:
|
||||
pass
|
||||
cbname = self._tk.splitlist(cbname)[0]
|
||||
for m, ca in self.trace_vinfo():
|
||||
if self._tk.splitlist(ca)[0] == cbname:
|
||||
break
|
||||
else:
|
||||
self._tk.deletecommand(cbname)
|
||||
try:
|
||||
self._tclCommands.remove(cbname)
|
||||
except ValueError:
|
||||
pass
|
||||
def trace_vinfo(self):
|
||||
"""Return all trace callback information."""
|
||||
return [self._tk.split(x) for x in self._tk.splitlist(
|
||||
return [self._tk.splitlist(x) for x in self._tk.splitlist(
|
||||
self._tk.call("trace", "vinfo", self._name))]
|
||||
def __eq__(self, other):
|
||||
"""Comparison for equality (==).
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import unittest
|
||||
|
||||
import gc
|
||||
from tkinter import (Variable, StringVar, IntVar, DoubleVar, BooleanVar, Tcl,
|
||||
TclError)
|
||||
|
||||
|
@ -87,6 +87,55 @@ class TestVariable(TestBase):
|
|||
v.set("value")
|
||||
self.assertTrue(v.side_effect)
|
||||
|
||||
def test_trace(self):
|
||||
v = Variable(self.root)
|
||||
vname = str(v)
|
||||
trace = []
|
||||
def read_tracer(*args):
|
||||
trace.append(('read',) + args)
|
||||
def write_tracer(*args):
|
||||
trace.append(('write',) + args)
|
||||
cb1 = v.trace_variable('r', read_tracer)
|
||||
cb2 = v.trace_variable('wu', write_tracer)
|
||||
self.assertEqual(sorted(v.trace_vinfo()), [('r', cb1), ('wu', cb2)])
|
||||
self.assertEqual(trace, [])
|
||||
|
||||
v.set('spam')
|
||||
self.assertEqual(trace, [('write', vname, '', 'w')])
|
||||
|
||||
trace = []
|
||||
v.get()
|
||||
self.assertEqual(trace, [('read', vname, '', 'r')])
|
||||
|
||||
trace = []
|
||||
info = sorted(v.trace_vinfo())
|
||||
v.trace_vdelete('w', cb1) # Wrong mode
|
||||
self.assertEqual(sorted(v.trace_vinfo()), info)
|
||||
with self.assertRaises(TclError):
|
||||
v.trace_vdelete('r', 'spam') # Wrong command name
|
||||
self.assertEqual(sorted(v.trace_vinfo()), info)
|
||||
v.trace_vdelete('r', (cb1, 43)) # Wrong arguments
|
||||
self.assertEqual(sorted(v.trace_vinfo()), info)
|
||||
v.get()
|
||||
self.assertEqual(trace, [('read', vname, '', 'r')])
|
||||
|
||||
trace = []
|
||||
v.trace_vdelete('r', cb1)
|
||||
self.assertEqual(v.trace_vinfo(), [('wu', cb2)])
|
||||
v.get()
|
||||
self.assertEqual(trace, [])
|
||||
|
||||
trace = []
|
||||
del write_tracer
|
||||
gc.collect()
|
||||
v.set('eggs')
|
||||
self.assertEqual(trace, [('write', vname, '', 'w')])
|
||||
|
||||
trace = []
|
||||
del v
|
||||
gc.collect()
|
||||
self.assertEqual(trace, [('write', vname, '', 'u')])
|
||||
|
||||
|
||||
class TestStringVar(TestBase):
|
||||
|
||||
|
|
|
@ -1486,6 +1486,57 @@ class TreeviewTest(AbstractWidgetTest, unittest.TestCase):
|
|||
value)
|
||||
|
||||
|
||||
def test_selection(self):
|
||||
# item 'none' doesn't exist
|
||||
self.assertRaises(tkinter.TclError, self.tv.selection_set, 'none')
|
||||
self.assertRaises(tkinter.TclError, self.tv.selection_add, 'none')
|
||||
self.assertRaises(tkinter.TclError, self.tv.selection_remove, 'none')
|
||||
self.assertRaises(tkinter.TclError, self.tv.selection_toggle, 'none')
|
||||
|
||||
item1 = self.tv.insert('', 'end')
|
||||
item2 = self.tv.insert('', 'end')
|
||||
c1 = self.tv.insert(item1, 'end')
|
||||
c2 = self.tv.insert(item1, 'end')
|
||||
c3 = self.tv.insert(item1, 'end')
|
||||
self.assertEqual(self.tv.selection(), ())
|
||||
|
||||
self.tv.selection_set((c1, item2))
|
||||
self.assertEqual(self.tv.selection(), (c1, item2))
|
||||
self.tv.selection_set(c2)
|
||||
self.assertEqual(self.tv.selection(), (c2,))
|
||||
|
||||
self.tv.selection_add((c1, item2))
|
||||
self.assertEqual(self.tv.selection(), (c1, c2, item2))
|
||||
self.tv.selection_add(item1)
|
||||
self.assertEqual(self.tv.selection(), (item1, c1, c2, item2))
|
||||
|
||||
self.tv.selection_remove((item1, c3))
|
||||
self.assertEqual(self.tv.selection(), (c1, c2, item2))
|
||||
self.tv.selection_remove(c2)
|
||||
self.assertEqual(self.tv.selection(), (c1, item2))
|
||||
|
||||
self.tv.selection_toggle((c1, c3))
|
||||
self.assertEqual(self.tv.selection(), (c3, item2))
|
||||
self.tv.selection_toggle(item2)
|
||||
self.assertEqual(self.tv.selection(), (c3,))
|
||||
|
||||
self.tv.insert('', 'end', id='with spaces')
|
||||
self.tv.selection_set('with spaces')
|
||||
self.assertEqual(self.tv.selection(), ('with spaces',))
|
||||
|
||||
self.tv.insert('', 'end', id='{brace')
|
||||
self.tv.selection_set('{brace')
|
||||
self.assertEqual(self.tv.selection(), ('{brace',))
|
||||
|
||||
self.tv.insert('', 'end', id='unicode\u20ac')
|
||||
self.tv.selection_set('unicode\u20ac')
|
||||
self.assertEqual(self.tv.selection(), ('unicode\u20ac',))
|
||||
|
||||
self.tv.insert('', 'end', id=b'bytes\xe2\x82\xac')
|
||||
self.tv.selection_set(b'bytes\xe2\x82\xac')
|
||||
self.assertEqual(self.tv.selection(), ('bytes\xe2\x82\xac',))
|
||||
|
||||
|
||||
def test_set(self):
|
||||
self.tv['columns'] = ['A', 'B']
|
||||
item = self.tv.insert('', 'end', values=['a', 'b'])
|
||||
|
|
|
@ -1392,7 +1392,9 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
|
|||
|
||||
def selection(self, selop=None, items=None):
|
||||
"""If selop is not specified, returns selected items."""
|
||||
return self.tk.call(self._w, "selection", selop, items)
|
||||
if isinstance(items, (str, bytes)):
|
||||
items = (items,)
|
||||
return self.tk.splitlist(self.tk.call(self._w, "selection", selop, items))
|
||||
|
||||
|
||||
def selection_set(self, items):
|
||||
|
|
|
@ -179,7 +179,7 @@ def config_dict(filename):
|
|||
continue
|
||||
try:
|
||||
key, value = line.split("=")
|
||||
except:
|
||||
except ValueError:
|
||||
print("Bad line in config-file %s:\n%s" % (filename,line))
|
||||
continue
|
||||
key = key.strip()
|
||||
|
@ -192,7 +192,7 @@ def config_dict(filename):
|
|||
value = float(value)
|
||||
else:
|
||||
value = int(value)
|
||||
except:
|
||||
except ValueError:
|
||||
pass # value need not be converted
|
||||
cfgdict[key] = value
|
||||
return cfgdict
|
||||
|
@ -220,7 +220,7 @@ def readconfig(cfgdict):
|
|||
try:
|
||||
head, tail = split(__file__)
|
||||
cfg_file2 = join(head, default_cfg)
|
||||
except:
|
||||
except Exception:
|
||||
cfg_file2 = ""
|
||||
if isfile(cfg_file2):
|
||||
cfgdict2 = config_dict(cfg_file2)
|
||||
|
@ -229,7 +229,7 @@ def readconfig(cfgdict):
|
|||
|
||||
try:
|
||||
readconfig(_CFG)
|
||||
except:
|
||||
except Exception:
|
||||
print ("No configfile read, reason unknown")
|
||||
|
||||
|
||||
|
@ -653,7 +653,7 @@ class TurtleScreenBase(object):
|
|||
x, y = (self.cv.canvasx(event.x)/self.xscale,
|
||||
-self.cv.canvasy(event.y)/self.yscale)
|
||||
fun(x, y)
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
self.cv.tag_bind(item, "<Button%s-Motion>" % num, eventfun, add)
|
||||
|
||||
|
@ -1158,7 +1158,7 @@ class TurtleScreen(TurtleScreenBase):
|
|||
raise TurtleGraphicsError("bad color string: %s" % str(color))
|
||||
try:
|
||||
r, g, b = color
|
||||
except:
|
||||
except (TypeError, ValueError):
|
||||
raise TurtleGraphicsError("bad color arguments: %s" % str(color))
|
||||
if self._colormode == 1.0:
|
||||
r, g, b = [round(255.0*x) for x in (r, g, b)]
|
||||
|
@ -2702,7 +2702,7 @@ class RawTurtle(TPen, TNavigator):
|
|||
return args
|
||||
try:
|
||||
r, g, b = args
|
||||
except:
|
||||
except (TypeError, ValueError):
|
||||
raise TurtleGraphicsError("bad color arguments: %s" % str(args))
|
||||
if self.screen._colormode == 1.0:
|
||||
r, g, b = [round(255.0*x) for x in (r, g, b)]
|
||||
|
@ -3865,7 +3865,7 @@ def read_docstrings(lang):
|
|||
try:
|
||||
# eval(key).im_func.__doc__ = docsdict[key]
|
||||
eval(key).__doc__ = docsdict[key]
|
||||
except:
|
||||
except Exception:
|
||||
print("Bad docstring-entry: %s" % key)
|
||||
|
||||
_LANGUAGE = _CFG["language"]
|
||||
|
@ -3875,7 +3875,7 @@ try:
|
|||
read_docstrings(_LANGUAGE)
|
||||
except ImportError:
|
||||
print("Cannot find docsdict for", _LANGUAGE)
|
||||
except:
|
||||
except Exception:
|
||||
print ("Unknown Error when trying to import %s-docstring-dictionary" %
|
||||
_LANGUAGE)
|
||||
|
||||
|
|
|
@ -1694,6 +1694,7 @@ _non_defaults = {
|
|||
'__reduce__', '__reduce_ex__', '__getinitargs__', '__getnewargs__',
|
||||
'__getstate__', '__setstate__', '__getformat__', '__setformat__',
|
||||
'__repr__', '__dir__', '__subclasses__', '__format__',
|
||||
'__getnewargs_ex__',
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -68,8 +68,6 @@ for idx, value in enumerate(sys.argv):
|
|||
break
|
||||
|
||||
# Now it is safe to import idlelib.
|
||||
from idlelib import macosxSupport
|
||||
macosxSupport._appbundle = True
|
||||
from idlelib.PyShell import main
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
|
@ -1655,6 +1655,7 @@ Uwe Zessin
|
|||
Cheng Zhang
|
||||
Kai Zhu
|
||||
Tarek Ziadé
|
||||
Jelle Zijlstra
|
||||
Gennadiy Zlobin
|
||||
Doug Zongker
|
||||
Peter Åstrand
|
||||
|
|
63 Misc/NEWS
@@ -2,8 +2,8 @@
Python News
+++++++++++

What's New in Python 3.5.3rc1?
==============================
What's New in Python 3.5.3 release candidate 1?
===============================================

Release date: TBA

@@ -13,6 +13,65 @@ Core and Builtins
Library
-------

- Issue #22115: Fixed tracing Tkinter variables: trace_vdelete() with a wrong
  mode no longer breaks tracing, trace_vinfo() now always returns a list of
  pairs of strings, and tracing in the "u" mode now works.

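A minimal sketch of the behaviour this entry describes, assuming a Tcl interpreter is available (the variable value and the callback are illustrative):

    from tkinter import Tcl, Variable

    root = Tcl()
    v = Variable(root)
    cb = v.trace_variable('wu', lambda *args: print('trace fired:', args))
    v.set('spam')                    # fires the "w" trace
    v.trace_vdelete('r', cb)         # wrong mode: no longer breaks later tracing
    print(v.trace_vinfo())           # always a list of (mode, callback-name) string pairs
    del v                            # unsetting the variable fires the "u" trace, which now works
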
- Fix a scoping issue in importlib.util.LazyLoader which triggered an
  UnboundLocalError when lazy-loading a module that was already put into
  sys.modules.

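For context, a sketch of the usual LazyLoader wiring this fix concerns; the module name is arbitrary, and the point of the fix is the case where the module already sits in sys.modules when the first attribute access forces the real load:

    import importlib.util
    import sys

    def lazy_import(name):
        spec = importlib.util.find_spec(name)
        loader = importlib.util.LazyLoader(spec.loader)
        spec.loader = loader
        module = importlib.util.module_from_spec(spec)
        sys.modules[name] = module   # already registered before the load happens
        loader.exec_module(module)
        return module

    json = lazy_import('json')
    print(json.dumps({}))            # first attribute access triggers the real import
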
- Issue #27079: Fixed curses.ascii functions isblank(), iscntrl() and ispunct().

- Issue #26754: Some functions (compile() etc.) accepted a filename argument
  encoded as an iterable of integers. Now only strings and bytes-like objects
  are accepted.

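A short illustration of the tightened check, using a throwaway filename:

    compile("pass", "file.py", "exec")          # str filename: accepted
    compile("pass", b"file.py", "exec")         # bytes filename: accepted
    compile("pass", list(b"file.py"), "exec")   # iterable of ints: now raises TypeError
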
- Issue #27048: Prevents distutils from failing on Windows when environment
  variables contain non-ASCII characters.

- Issue #27330: Fixed possible leaks in the ctypes module.

- Issue #27238: Got rid of bare excepts in the turtle module. Original patch
  by Jelle Zijlstra.

- Issue #27122: When an exception is raised within the context being managed
  by a contextlib.ExitStack() and one of the exit stack generators
  catches and raises it in a chain, do not re-raise the original exception
  when exiting, let the new chained one through. This avoids the PEP 479
  bug described in issue #25782.

- Issue #27278: Fix os.urandom() implementation using getrandom() on Linux.
  Truncate size to INT_MAX and loop until enough random bytes have been
  collected, instead of directly casting a Py_ssize_t to int.

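A hedged sketch of the ExitStack behaviour described above; the context manager and exception types are invented for illustration. The body raises KeyError, the exit generator catches it and raises a chained RuntimeError, and the RuntimeError (not the original KeyError) is what escapes the with block:

    from contextlib import ExitStack, contextmanager

    @contextmanager
    def translate_errors():
        try:
            yield
        except KeyError as exc:
            # Catch the original error and raise a new, chained one.
            raise RuntimeError("lookup failed") from exc

    try:
        with ExitStack() as stack:
            stack.enter_context(translate_errors())
            {}["missing"]            # KeyError raised inside the managed context
    except RuntimeError as exc:
        print(type(exc).__name__, "chained from", type(exc.__cause__).__name__)
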
- Issue #26386: Fixed ttk.TreeView selection operations with item ids
  containing spaces.

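A minimal sketch of the fixed behaviour; it needs a Tk display, and the widget setup is illustrative:

    import tkinter
    from tkinter import ttk

    root = tkinter.Tk()
    tv = ttk.Treeview(root)
    tv.insert('', 'end', id='with spaces')
    tv.selection_set('with spaces')
    print(tv.selection())            # ('with spaces',) -- the id is no longer split on the space
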
- Issue #22636: Avoid shell injection problems with
  ctypes.util.find_library().

- Issue #16182: Fix various functions in the "readline" module to use the
  locale encoding, and fix get_begidx() and get_endidx() to return code point
  indexes.

- Issue #26930: Update Windows builds to use OpenSSL 1.0.2h.

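A small sketch of the locale-encoding fix for history items; it assumes the readline module is available (it is not on Windows) and uses an illustrative history file name:

    import readline

    readline.clear_history()
    readline.add_history("entrée 1")              # stored via the locale encoding
    readline.write_history_file("history.txt")
    readline.clear_history()
    readline.read_history_file("history.txt")
    # On libedit-based builds an extra add_history() call may be needed first
    # (see the test earlier in this diff).
    print(readline.get_history_item(1))           # 'entrée 1'
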
IDLE
----

- Issue #27365: Allow non-ASCII chars in IDLE NEWS.txt, for contributor names.

- Issue #27245: IDLE: Cleanly delete custom themes and key bindings.
  Previously, when IDLE was started from a console or by import, a cascade
  of warnings was emitted. Patch by Serhiy Storchaka.

C API
-----

- Issue #26754: PyUnicode_FSDecoder() accepted a filename argument encoded as
  an iterable of integers. Now only strings and bytes-like objects are accepted.


What's New in Python 3.5.2?
===========================
@ -1124,7 +1124,7 @@ CharArray_get_raw(CDataObject *self)
|
|||
static PyObject *
|
||||
CharArray_get_value(CDataObject *self)
|
||||
{
|
||||
int i;
|
||||
Py_ssize_t i;
|
||||
char *ptr = self->b_ptr;
|
||||
for (i = 0; i < self->b_size; ++i)
|
||||
if (*ptr++ == '\0')
|
||||
|
@ -1180,9 +1180,9 @@ static PyGetSetDef CharArray_getsets[] = {
|
|||
static PyObject *
|
||||
WCharArray_get_value(CDataObject *self)
|
||||
{
|
||||
unsigned int i;
|
||||
Py_ssize_t i;
|
||||
wchar_t *ptr = (wchar_t *)self->b_ptr;
|
||||
for (i = 0; i < self->b_size/sizeof(wchar_t); ++i)
|
||||
for (i = 0; i < self->b_size/(Py_ssize_t)sizeof(wchar_t); ++i)
|
||||
if (*ptr++ == (wchar_t)0)
|
||||
break;
|
||||
return PyUnicode_FromWideChar((wchar_t *)self->b_ptr, i);
|
||||
|
@ -1211,7 +1211,7 @@ WCharArray_set_value(CDataObject *self, PyObject *value)
|
|||
wstr = PyUnicode_AsUnicodeAndSize(value, &len);
|
||||
if (wstr == NULL)
|
||||
return -1;
|
||||
if ((unsigned)len > self->b_size/sizeof(wchar_t)) {
|
||||
if ((size_t)len > self->b_size/sizeof(wchar_t)) {
|
||||
PyErr_SetString(PyExc_ValueError,
|
||||
"string too long");
|
||||
result = -1;
|
||||
|
@ -1252,8 +1252,10 @@ add_methods(PyTypeObject *type, PyMethodDef *meth)
|
|||
descr = PyDescr_NewMethod(type, meth);
|
||||
if (descr == NULL)
|
||||
return -1;
|
||||
if (PyDict_SetItemString(dict,meth->ml_name, descr) < 0)
|
||||
if (PyDict_SetItemString(dict, meth->ml_name, descr) < 0) {
|
||||
Py_DECREF(descr);
|
||||
return -1;
|
||||
}
|
||||
Py_DECREF(descr);
|
||||
}
|
||||
return 0;
|
||||
|
@ -1268,8 +1270,10 @@ add_members(PyTypeObject *type, PyMemberDef *memb)
|
|||
descr = PyDescr_NewMember(type, memb);
|
||||
if (descr == NULL)
|
||||
return -1;
|
||||
if (PyDict_SetItemString(dict, memb->name, descr) < 0)
|
||||
if (PyDict_SetItemString(dict, memb->name, descr) < 0) {
|
||||
Py_DECREF(descr);
|
||||
return -1;
|
||||
}
|
||||
Py_DECREF(descr);
|
||||
}
|
||||
return 0;
|
||||
|
@ -1285,8 +1289,10 @@ add_getset(PyTypeObject *type, PyGetSetDef *gsp)
|
|||
descr = PyDescr_NewGetSet(type, gsp);
|
||||
if (descr == NULL)
|
||||
return -1;
|
||||
if (PyDict_SetItemString(dict, gsp->name, descr) < 0)
|
||||
if (PyDict_SetItemString(dict, gsp->name, descr) < 0) {
|
||||
Py_DECREF(descr);
|
||||
return -1;
|
||||
}
|
||||
Py_DECREF(descr);
|
||||
}
|
||||
return 0;
|
||||
|
@ -1778,6 +1784,7 @@ static PyObject *CreateSwappedType(PyTypeObject *type, PyObject *args, PyObject
|
|||
|
||||
newname = PyUnicode_Concat(name, suffix);
|
||||
if (newname == NULL) {
|
||||
Py_DECREF(swapped_args);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
@ -1797,8 +1804,10 @@ static PyObject *CreateSwappedType(PyTypeObject *type, PyObject *args, PyObject
|
|||
|
||||
stgdict = (StgDictObject *)PyObject_CallObject(
|
||||
(PyObject *)&PyCStgDict_Type, NULL);
|
||||
if (!stgdict) /* XXX leaks result! */
|
||||
if (!stgdict) {
|
||||
Py_DECREF(result);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
stgdict->ffi_type_pointer = *fmt->pffi_type;
|
||||
stgdict->align = fmt->pffi_type->alignment;
|
||||
|
@ -1978,8 +1987,10 @@ PyCSimpleType_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
|
|||
PyObject *meth;
|
||||
int x;
|
||||
meth = PyDescr_NewClassMethod(result, ml);
|
||||
if (!meth)
|
||||
if (!meth) {
|
||||
Py_DECREF(result);
|
||||
return NULL;
|
||||
}
|
||||
x = PyDict_SetItemString(result->tp_dict,
|
||||
ml->ml_name,
|
||||
meth);
|
||||
|
@ -2159,8 +2170,10 @@ converters_from_argtypes(PyObject *ob)
|
|||
|
||||
nArgs = PyTuple_GET_SIZE(ob);
|
||||
converters = PyTuple_New(nArgs);
|
||||
if (!converters)
|
||||
if (!converters) {
|
||||
Py_DECREF(ob);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
/* I have to check if this is correct. Using c_char, which has a size
|
||||
of 1, will be assumed to be pushed as only one byte!
|
||||
|
@ -4052,14 +4065,9 @@ _init_pos_args(PyObject *self, PyTypeObject *type,
|
|||
}
|
||||
val = PyTuple_GET_ITEM(args, i + index);
|
||||
if (kwds && PyDict_GetItem(kwds, name)) {
|
||||
char *field = PyBytes_AsString(name);
|
||||
if (field == NULL) {
|
||||
PyErr_Clear();
|
||||
field = "???";
|
||||
}
|
||||
PyErr_Format(PyExc_TypeError,
|
||||
"duplicate values for field '%s'",
|
||||
field);
|
||||
"duplicate values for field %R",
|
||||
name);
|
||||
Py_DECREF(pair);
|
||||
Py_DECREF(name);
|
||||
return -1;
|
||||
|
|
|
@ -157,8 +157,10 @@ _ctypes_get_errobj(int **pspace)
|
|||
return NULL;
|
||||
memset(space, 0, sizeof(int) * 2);
|
||||
errobj = PyCapsule_New(space, CTYPES_CAPSULE_NAME_PYMEM, pymem_destructor);
|
||||
if (errobj == NULL)
|
||||
if (errobj == NULL) {
|
||||
PyMem_Free(space);
|
||||
return NULL;
|
||||
}
|
||||
if (-1 == PyDict_SetItem(dict, error_object_name,
|
||||
errobj)) {
|
||||
Py_DECREF(errobj);
|
||||
|
@ -1681,6 +1683,10 @@ POINTER(PyObject *self, PyObject *cls)
|
|||
if (result == NULL)
|
||||
return result;
|
||||
key = PyLong_FromVoidPtr(result);
|
||||
if (key == NULL) {
|
||||
Py_DECREF(result);
|
||||
return NULL;
|
||||
}
|
||||
} else if (PyType_Check(cls)) {
|
||||
typ = (PyTypeObject *)cls;
|
||||
buf = PyMem_Malloc(strlen(typ->tp_name) + 3 + 1);
|
||||
|
|
|
@ -1246,8 +1246,7 @@ U_set(void *ptr, PyObject *value, Py_ssize_t length)
|
|||
"unicode string expected instead of %s instance",
|
||||
value->ob_type->tp_name);
|
||||
return NULL;
|
||||
} else
|
||||
Py_INCREF(value);
|
||||
}
|
||||
|
||||
wstr = PyUnicode_AsUnicodeAndSize(value, &size);
|
||||
if (wstr == NULL)
|
||||
|
@ -1256,7 +1255,6 @@ U_set(void *ptr, PyObject *value, Py_ssize_t length)
|
|||
PyErr_Format(PyExc_ValueError,
|
||||
"string too long (%zd, maximum length %zd)",
|
||||
size, length);
|
||||
Py_DECREF(value);
|
||||
return NULL;
|
||||
} else if (size < length-1)
|
||||
/* copy terminating NUL character if there is space */
|
||||
|
@ -1266,6 +1264,7 @@ U_set(void *ptr, PyObject *value, Py_ssize_t length)
|
|||
return NULL;
|
||||
}
|
||||
|
||||
Py_INCREF(value);
|
||||
return value;
|
||||
}
|
||||
|
||||
|
@ -1292,9 +1291,7 @@ s_set(void *ptr, PyObject *value, Py_ssize_t length)
|
|||
char *data;
|
||||
Py_ssize_t size;
|
||||
|
||||
if(PyBytes_Check(value)) {
|
||||
Py_INCREF(value);
|
||||
} else {
|
||||
if(!PyBytes_Check(value)) {
|
||||
PyErr_Format(PyExc_TypeError,
|
||||
"expected bytes, %s found",
|
||||
value->ob_type->tp_name);
|
||||
|
@ -1302,11 +1299,9 @@ s_set(void *ptr, PyObject *value, Py_ssize_t length)
|
|||
}
|
||||
|
||||
data = PyBytes_AS_STRING(value);
|
||||
if (!data)
|
||||
return NULL;
|
||||
size = strlen(data); /* XXX Why not Py_SIZE(value)? */
|
||||
if (size < length) {
|
||||
/* This will copy the leading NUL character
|
||||
/* This will copy the terminating NUL character
|
||||
* if there is space for it.
|
||||
*/
|
||||
++size;
|
||||
|
@ -1314,13 +1309,11 @@ s_set(void *ptr, PyObject *value, Py_ssize_t length)
|
|||
PyErr_Format(PyExc_ValueError,
|
||||
"bytes too long (%zd, maximum length %zd)",
|
||||
size, length);
|
||||
Py_DECREF(value);
|
||||
return NULL;
|
||||
}
|
||||
/* Also copy the terminating NUL character if there is space */
|
||||
memcpy((char *)ptr, data, size);
|
||||
|
||||
Py_DECREF(value);
|
||||
_RET(value);
|
||||
}
|
||||
|
||||
|
@ -1428,9 +1421,7 @@ BSTR_set(void *ptr, PyObject *value, Py_ssize_t size)
|
|||
/* convert value into a PyUnicodeObject or NULL */
|
||||
if (Py_None == value) {
|
||||
value = NULL;
|
||||
} else if (PyUnicode_Check(value)) {
|
||||
Py_INCREF(value); /* for the descref below */
|
||||
} else {
|
||||
} else if (!PyUnicode_Check(value)) {
|
||||
PyErr_Format(PyExc_TypeError,
|
||||
"unicode string expected instead of %s instance",
|
||||
value->ob_type->tp_name);
|
||||
|
@ -1449,7 +1440,6 @@ BSTR_set(void *ptr, PyObject *value, Py_ssize_t size)
|
|||
return NULL;
|
||||
}
|
||||
bstr = SysAllocStringLen(wvalue, (unsigned)wsize);
|
||||
Py_DECREF(value);
|
||||
} else
|
||||
bstr = NULL;
|
||||
|
||||
|
|
|
@ -2630,12 +2630,18 @@ PyDecType_FromSequenceExact(PyTypeObject *type, PyObject *v,
|
|||
|
||||
/* class method */
|
||||
static PyObject *
|
||||
dec_from_float(PyObject *dec, PyObject *pyfloat)
|
||||
dec_from_float(PyObject *type, PyObject *pyfloat)
|
||||
{
|
||||
PyObject *context;
|
||||
PyObject *result;
|
||||
|
||||
CURRENT_CONTEXT(context);
|
||||
return PyDecType_FromFloatExact((PyTypeObject *)dec, pyfloat, context);
|
||||
result = PyDecType_FromFloatExact(&PyDec_Type, pyfloat, context);
|
||||
if (type != (PyObject *)&PyDec_Type && result != NULL) {
|
||||
Py_SETREF(result, PyObject_CallFunctionObjArgs(type, result, NULL));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/* create_decimal_from_float */
|
||||
|
|
|
@ -1582,10 +1582,23 @@ _elementtree_Element_remove_impl(ElementObject *self, PyObject *subelement)
|
|||
static PyObject*
|
||||
element_repr(ElementObject* self)
|
||||
{
|
||||
if (self->tag)
|
||||
return PyUnicode_FromFormat("<Element %R at %p>", self->tag, self);
|
||||
else
|
||||
int status;
|
||||
|
||||
if (self->tag == NULL)
|
||||
return PyUnicode_FromFormat("<Element at %p>", self);
|
||||
|
||||
status = Py_ReprEnter((PyObject *)self);
|
||||
if (status == 0) {
|
||||
PyObject *res;
|
||||
res = PyUnicode_FromFormat("<Element %R at %p>", self->tag, self);
|
||||
Py_ReprLeave((PyObject *)self);
|
||||
return res;
|
||||
}
|
||||
if (status > 0)
|
||||
PyErr_Format(PyExc_RuntimeError,
|
||||
"reentrant call inside %s.__repr__",
|
||||
Py_TYPE(self)->tp_name);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
/*[clinic input]
|
||||
|
|
|
@ -203,40 +203,45 @@ static PyGetSetDef partial_getsetlist[] = {
|
|||
static PyObject *
|
||||
partial_repr(partialobject *pto)
|
||||
{
|
||||
PyObject *result;
|
||||
PyObject *result = NULL;
|
||||
PyObject *arglist;
|
||||
PyObject *tmp;
|
||||
Py_ssize_t i, n;
|
||||
PyObject *key, *value;
|
||||
int status;
|
||||
|
||||
status = Py_ReprEnter((PyObject *)pto);
|
||||
if (status != 0) {
|
||||
if (status < 0)
|
||||
return NULL;
|
||||
return PyUnicode_FromFormat("%s(...)", Py_TYPE(pto)->tp_name);
|
||||
}
|
||||
|
||||
arglist = PyUnicode_FromString("");
|
||||
if (arglist == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
if (arglist == NULL)
|
||||
goto done;
|
||||
/* Pack positional arguments */
|
||||
assert (PyTuple_Check(pto->args));
|
||||
n = PyTuple_GET_SIZE(pto->args);
|
||||
for (i = 0; i < n; i++) {
|
||||
tmp = PyUnicode_FromFormat("%U, %R", arglist,
|
||||
PyTuple_GET_ITEM(pto->args, i));
|
||||
Py_DECREF(arglist);
|
||||
if (tmp == NULL)
|
||||
return NULL;
|
||||
arglist = tmp;
|
||||
Py_SETREF(arglist, PyUnicode_FromFormat("%U, %R", arglist,
|
||||
PyTuple_GET_ITEM(pto->args, i)));
|
||||
if (arglist == NULL)
|
||||
goto done;
|
||||
}
|
||||
/* Pack keyword arguments */
|
||||
assert (PyDict_Check(pto->kw));
|
||||
for (i = 0; PyDict_Next(pto->kw, &i, &key, &value);) {
|
||||
tmp = PyUnicode_FromFormat("%U, %U=%R", arglist,
|
||||
key, value);
|
||||
Py_DECREF(arglist);
|
||||
if (tmp == NULL)
|
||||
return NULL;
|
||||
arglist = tmp;
|
||||
Py_SETREF(arglist, PyUnicode_FromFormat("%U, %U=%R", arglist,
|
||||
key, value));
|
||||
if (arglist == NULL)
|
||||
goto done;
|
||||
}
|
||||
result = PyUnicode_FromFormat("%s(%R%U)", Py_TYPE(pto)->tp_name,
|
||||
pto->fn, arglist);
|
||||
Py_DECREF(arglist);
|
||||
|
||||
done:
|
||||
Py_ReprLeave((PyObject *)pto);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
|
|
@ -164,6 +164,10 @@ int pysqlite_connection_init(pysqlite_Connection* self, PyObject* args, PyObject
|
|||
#ifdef WITH_THREAD
|
||||
self->thread_ident = PyThread_get_thread_ident();
|
||||
#endif
|
||||
if (!check_same_thread && sqlite3_libversion_number() < 3003001) {
|
||||
PyErr_SetString(pysqlite_NotSupportedError, "shared connections not available");
|
||||
return -1;
|
||||
}
|
||||
self->check_same_thread = check_same_thread;
|
||||
|
||||
self->function_pinboard = PyDict_New();
|
||||
|
|
|
@ -42,7 +42,7 @@ extern PyObject* pysqlite_NotSupportedError;
|
|||
extern PyObject* time_time;
|
||||
extern PyObject* time_sleep;
|
||||
|
||||
/* A dictionary, mapping colum types (INTEGER, VARCHAR, etc.) to converter
|
||||
/* A dictionary, mapping column types (INTEGER, VARCHAR, etc.) to converter
|
||||
* functions, that convert the SQL value to the appropriate Python value.
|
||||
* The key is uppercase.
|
||||
*/
|
||||
|
|
|
@ -1268,7 +1268,7 @@ array_array_buffer_info_impl(arrayobject *self)
|
|||
}
|
||||
PyTuple_SET_ITEM(retval, 0, v);
|
||||
|
||||
v = PyLong_FromLong((long)(Py_SIZE(self)));
|
||||
v = PyLong_FromSsize_t(Py_SIZE(self));
|
||||
if (v == NULL) {
|
||||
Py_DECREF(retval);
|
||||
return NULL;
|
||||
|
|
|
@ -2,6 +2,8 @@
|
|||
See the file COPYING for copying permission.
|
||||
*/
|
||||
|
||||
#include <stddef.h>
|
||||
|
||||
#ifdef COMPILED_FROM_DSP
|
||||
#include "winconfig.h"
|
||||
#elif defined(MACOS_CLASSIC)
|
||||
|
@ -16,8 +18,6 @@
|
|||
#endif
|
||||
#endif /* ndef COMPILED_FROM_DSP */
|
||||
|
||||
#include <stddef.h>
|
||||
|
||||
#include "expat_external.h"
|
||||
#include "internal.h"
|
||||
#include "xmlrole.h"
|
||||
|
|
|
@ -2,6 +2,8 @@
|
|||
See the file COPYING for copying permission.
|
||||
*/
|
||||
|
||||
#include <stddef.h>
|
||||
|
||||
#ifdef COMPILED_FROM_DSP
|
||||
#include "winconfig.h"
|
||||
#elif defined(MACOS_CLASSIC)
|
||||
|
@ -16,8 +18,6 @@
|
|||
#endif
|
||||
#endif /* ndef COMPILED_FROM_DSP */
|
||||
|
||||
#include <stddef.h>
|
||||
|
||||
#include "expat_external.h"
|
||||
#include "internal.h"
|
||||
#include "xmltok.h"
|
||||
|
|
|
@ -128,20 +128,40 @@ static PyModuleDef readlinemodule;
|
|||
#define readlinestate_global ((readlinestate *)PyModule_GetState(PyState_FindModule(&readlinemodule)))
|
||||
|
||||
|
||||
/* Convert to/from multibyte C strings */
|
||||
|
||||
static PyObject *
|
||||
encode(PyObject *b)
|
||||
{
|
||||
return PyUnicode_EncodeLocale(b, "surrogateescape");
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
decode(const char *s)
|
||||
{
|
||||
return PyUnicode_DecodeLocale(s, "surrogateescape");
|
||||
}
|
||||
|
||||
|
||||
/* Exported function to send one line to readline's init file parser */
|
||||
|
||||
static PyObject *
|
||||
parse_and_bind(PyObject *self, PyObject *args)
|
||||
parse_and_bind(PyObject *self, PyObject *string)
|
||||
{
|
||||
char *s, *copy;
|
||||
if (!PyArg_ParseTuple(args, "s:parse_and_bind", &s))
|
||||
char *copy;
|
||||
PyObject *encoded = encode(string);
|
||||
if (encoded == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
/* Make a copy -- rl_parse_and_bind() modifies its argument */
|
||||
/* Bernard Herzog */
|
||||
copy = PyMem_Malloc(1 + strlen(s));
|
||||
if (copy == NULL)
|
||||
copy = PyMem_Malloc(1 + PyBytes_GET_SIZE(encoded));
|
||||
if (copy == NULL) {
|
||||
Py_DECREF(encoded);
|
||||
return PyErr_NoMemory();
|
||||
strcpy(copy, s);
|
||||
}
|
||||
strcpy(copy, PyBytes_AS_STRING(encoded));
|
||||
Py_DECREF(encoded);
|
||||
rl_parse_and_bind(copy);
|
||||
PyMem_Free(copy); /* Free the copy */
|
||||
Py_RETURN_NONE;
|
||||
|
@ -441,17 +461,18 @@ get the ending index of the completion scope");
|
|||
/* Set the tab-completion word-delimiters that readline uses */
|
||||
|
||||
static PyObject *
|
||||
set_completer_delims(PyObject *self, PyObject *args)
|
||||
set_completer_delims(PyObject *self, PyObject *string)
|
||||
{
|
||||
char *break_chars;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "s:set_completer_delims", &break_chars)) {
|
||||
PyObject *encoded = encode(string);
|
||||
if (encoded == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
/* Keep a reference to the allocated memory in the module state in case
|
||||
some other module modifies rl_completer_word_break_characters
|
||||
(see issue #17289). */
|
||||
break_chars = strdup(break_chars);
|
||||
break_chars = strdup(PyBytes_AS_STRING(encoded));
|
||||
Py_DECREF(encoded);
|
||||
if (break_chars) {
|
||||
free(completer_word_break_characters);
|
||||
completer_word_break_characters = break_chars;
|
||||
|
@ -531,10 +552,11 @@ static PyObject *
|
|||
py_replace_history(PyObject *self, PyObject *args)
|
||||
{
|
||||
int entry_number;
|
||||
char *line;
|
||||
PyObject *line;
|
||||
PyObject *encoded;
|
||||
HIST_ENTRY *old_entry;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "is:replace_history_item", &entry_number,
|
||||
if (!PyArg_ParseTuple(args, "iU:replace_history_item", &entry_number,
|
||||
&line)) {
|
||||
return NULL;
|
||||
}
|
||||
|
@ -543,7 +565,12 @@ py_replace_history(PyObject *self, PyObject *args)
|
|||
"History index cannot be negative");
|
||||
return NULL;
|
||||
}
|
||||
old_entry = replace_history_entry(entry_number, line, (void *)NULL);
|
||||
encoded = encode(line);
|
||||
if (encoded == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
old_entry = replace_history_entry(entry_number, PyBytes_AS_STRING(encoded), (void *)NULL);
|
||||
Py_DECREF(encoded);
|
||||
if (!old_entry) {
|
||||
PyErr_Format(PyExc_ValueError,
|
||||
"No history item at position %d",
|
||||
|
@ -562,14 +589,14 @@ replaces history item given by its position with contents of line");
|
|||
/* Add a line to the history buffer */
|
||||
|
||||
static PyObject *
|
||||
py_add_history(PyObject *self, PyObject *args)
|
||||
py_add_history(PyObject *self, PyObject *string)
|
||||
{
|
||||
char *line;
|
||||
|
||||
if(!PyArg_ParseTuple(args, "s:add_history", &line)) {
|
||||
PyObject *encoded = encode(string);
|
||||
if (encoded == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
add_history(line);
|
||||
add_history(PyBytes_AS_STRING(encoded));
|
||||
Py_DECREF(encoded);
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
|
@ -583,7 +610,7 @@ add an item to the history buffer");
|
|||
static PyObject *
|
||||
get_completer_delims(PyObject *self, PyObject *noarg)
|
||||
{
|
||||
return PyUnicode_FromString(rl_completer_word_break_characters);
|
||||
return decode(rl_completer_word_break_characters);
|
||||
}
|
||||
|
||||
PyDoc_STRVAR(doc_get_completer_delims,
|
||||
|
@ -673,7 +700,7 @@ get_history_item(PyObject *self, PyObject *args)
|
|||
}
|
||||
#endif /* __APPLE__ */
|
||||
if ((hist_ent = history_get(idx)))
|
||||
return PyUnicode_FromString(hist_ent->line);
|
||||
return decode(hist_ent->line);
|
||||
else {
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
@ -702,7 +729,7 @@ return the current (not the maximum) length of history.");
|
|||
static PyObject *
|
||||
get_line_buffer(PyObject *self, PyObject *noarg)
|
||||
{
|
||||
return PyUnicode_FromString(rl_line_buffer);
|
||||
return decode(rl_line_buffer);
|
||||
}
|
||||
|
||||
PyDoc_STRVAR(doc_get_line_buffer,
|
||||
|
@ -730,12 +757,14 @@ Clear the current readline history.");
|
|||
/* Exported function to insert text into the line buffer */
|
||||
|
||||
static PyObject *
|
||||
insert_text(PyObject *self, PyObject *args)
|
||||
insert_text(PyObject *self, PyObject *string)
|
||||
{
|
||||
char *s;
|
||||
if (!PyArg_ParseTuple(args, "s:insert_text", &s))
|
||||
PyObject *encoded = encode(string);
|
||||
if (encoded == NULL) {
|
||||
return NULL;
|
||||
rl_insert_text(s);
|
||||
}
|
||||
rl_insert_text(PyBytes_AS_STRING(encoded));
|
||||
Py_DECREF(encoded);
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
|
@ -763,9 +792,9 @@ contents of the line buffer.");
|
|||
|
||||
static struct PyMethodDef readline_methods[] =
|
||||
{
|
||||
{"parse_and_bind", parse_and_bind, METH_VARARGS, doc_parse_and_bind},
|
||||
{"parse_and_bind", parse_and_bind, METH_O, doc_parse_and_bind},
|
||||
{"get_line_buffer", get_line_buffer, METH_NOARGS, doc_get_line_buffer},
|
||||
{"insert_text", insert_text, METH_VARARGS, doc_insert_text},
|
||||
{"insert_text", insert_text, METH_O, doc_insert_text},
|
||||
{"redisplay", redisplay, METH_NOARGS, doc_redisplay},
|
||||
{"read_init_file", read_init_file, METH_VARARGS, doc_read_init_file},
|
||||
{"read_history_file", read_history_file,
|
||||
|
@ -792,8 +821,8 @@ static struct PyMethodDef readline_methods[] =
|
|||
{"get_endidx", get_endidx, METH_NOARGS, doc_get_endidx},
|
||||
|
||||
{"set_completer_delims", set_completer_delims,
|
||||
METH_VARARGS, doc_set_completer_delims},
|
||||
{"add_history", py_add_history, METH_VARARGS, doc_add_history},
|
||||
METH_O, doc_set_completer_delims},
|
||||
{"add_history", py_add_history, METH_O, doc_add_history},
|
||||
{"remove_history_item", py_remove_history, METH_VARARGS, doc_remove_history},
|
||||
{"replace_history_item", py_replace_history, METH_VARARGS, doc_replace_history},
|
||||
{"get_completer_delims", get_completer_delims,
|
||||
|
@ -890,7 +919,7 @@ on_completion_display_matches_hook(char **matches,
|
|||
int num_matches, int max_length)
|
||||
{
|
||||
int i;
|
||||
PyObject *m=NULL, *s=NULL, *r=NULL;
|
||||
PyObject *sub, *m=NULL, *s=NULL, *r=NULL;
|
||||
#ifdef WITH_THREAD
|
||||
PyGILState_STATE gilstate = PyGILState_Ensure();
|
||||
#endif
|
||||
|
@ -898,16 +927,17 @@ on_completion_display_matches_hook(char **matches,
|
|||
if (m == NULL)
|
||||
goto error;
|
||||
for (i = 0; i < num_matches; i++) {
|
||||
s = PyUnicode_FromString(matches[i+1]);
|
||||
s = decode(matches[i+1]);
|
||||
if (s == NULL)
|
||||
goto error;
|
||||
if (PyList_SetItem(m, i, s) == -1)
|
||||
goto error;
|
||||
}
|
||||
sub = decode(matches[0]);
|
||||
r = PyObject_CallFunction(readlinestate_global->completion_display_matches_hook,
|
||||
"sOi", matches[0], m, max_length);
|
||||
"NNi", sub, m, max_length);
|
||||
|
||||
Py_DECREF(m); m=NULL;
|
||||
m=NULL;
|
||||
|
||||
if (r == NULL ||
|
||||
(r != Py_None && PyLong_AsLong(r) == -1 && PyErr_Occurred())) {
|
||||
|
@ -955,22 +985,24 @@ on_completion(const char *text, int state)
|
|||
{
|
||||
char *result = NULL;
|
||||
if (readlinestate_global->completer != NULL) {
|
||||
PyObject *r;
|
||||
PyObject *r = NULL, *t;
|
||||
#ifdef WITH_THREAD
|
||||
PyGILState_STATE gilstate = PyGILState_Ensure();
|
||||
#endif
|
||||
rl_attempted_completion_over = 1;
|
||||
r = PyObject_CallFunction(readlinestate_global->completer, "si", text, state);
|
||||
t = decode(text);
|
||||
r = PyObject_CallFunction(readlinestate_global->completer, "Ni", t, state);
|
||||
if (r == NULL)
|
||||
goto error;
|
||||
if (r == Py_None) {
|
||||
result = NULL;
|
||||
}
|
||||
else {
|
||||
char *s = _PyUnicode_AsString(r);
|
||||
if (s == NULL)
|
||||
PyObject *encoded = encode(r);
|
||||
if (encoded == NULL)
|
||||
goto error;
|
||||
result = strdup(s);
|
||||
result = strdup(PyBytes_AS_STRING(encoded));
|
||||
Py_DECREF(encoded);
|
||||
}
|
||||
Py_DECREF(r);
|
||||
goto done;
|
||||
|
@ -994,6 +1026,9 @@ static char **
|
|||
flex_complete(const char *text, int start, int end)
|
||||
{
|
||||
char **result;
|
||||
char saved;
|
||||
size_t start_size, end_size;
|
||||
wchar_t *s;
|
||||
#ifdef WITH_THREAD
|
||||
PyGILState_STATE gilstate = PyGILState_Ensure();
|
||||
#endif
|
||||
|
@ -1003,6 +1038,27 @@ flex_complete(const char *text, int start, int end)
|
|||
#ifdef HAVE_RL_COMPLETION_SUPPRESS_APPEND
|
||||
rl_completion_suppress_append = 0;
|
||||
#endif
|
||||
|
||||
saved = rl_line_buffer[start];
|
||||
rl_line_buffer[start] = 0;
|
||||
s = Py_DecodeLocale(rl_line_buffer, &start_size);
|
||||
rl_line_buffer[start] = saved;
|
||||
if (s == NULL) {
|
||||
goto done;
|
||||
}
|
||||
PyMem_RawFree(s);
|
||||
saved = rl_line_buffer[end];
|
||||
rl_line_buffer[end] = 0;
|
||||
s = Py_DecodeLocale(rl_line_buffer + start, &end_size);
|
||||
rl_line_buffer[end] = saved;
|
||||
if (s == NULL) {
|
||||
goto done;
|
||||
}
|
||||
PyMem_RawFree(s);
|
||||
start = (int)start_size;
|
||||
end = start + (int)end_size;
|
||||
|
||||
done:
|
||||
Py_XDECREF(readlinestate_global->begidx);
|
||||
Py_XDECREF(readlinestate_global->endidx);
|
||||
readlinestate_global->begidx = PyLong_FromLong((long) start);
|
||||
|
|
|
@ -2109,7 +2109,7 @@ _Py_CheckFunctionResult(PyObject *func, PyObject *result, const char *where)
|
|||
"%s returned NULL without setting an error",
|
||||
where);
|
||||
#ifdef Py_DEBUG
|
||||
/* Ensure that the bug is catched in debug mode */
|
||||
/* Ensure that the bug is caught in debug mode */
|
||||
Py_FatalError("a function returned NULL without setting an error");
|
||||
#endif
|
||||
return NULL;
|
||||
|
@ -2132,7 +2132,7 @@ _Py_CheckFunctionResult(PyObject *func, PyObject *result, const char *where)
|
|||
where);
|
||||
_PyErr_ChainExceptions(exc, val, tb);
|
||||
#ifdef Py_DEBUG
|
||||
/* Ensure that the bug is catched in debug mode */
|
||||
/* Ensure that the bug is caught in debug mode */
|
||||
Py_FatalError("a function returned a result with an error set");
|
||||
#endif
|
||||
return NULL;
|
||||
|
|