mirror of https://github.com/python/cpython
Merge heads.
This commit is contained in:
commit c8c47f55e6
@@ -133,15 +133,15 @@ a buffer, see :c:func:`PyObject_GetBuffer`.
called on non-NULL :c:member:`~Py_buffer.format` values.

Important exception: If a consumer requests a buffer without the
-:c:macro:`PyBUF_FORMAT` flag, :c:member:`~Py_Buffer.format` will
+:c:macro:`PyBUF_FORMAT` flag, :c:member:`~Py_buffer.format` will
be set to *NULL*, but :c:member:`~Py_buffer.itemsize` still has
the value for the original format.

-If :c:member:`~Py_Buffer.shape` is present, the equality
+If :c:member:`~Py_buffer.shape` is present, the equality
``product(shape) * itemsize == len`` still holds and the consumer
can use :c:member:`~Py_buffer.itemsize` to navigate the buffer.

-If :c:member:`~Py_Buffer.shape` is *NULL* as a result of a :c:macro:`PyBUF_SIMPLE`
+If :c:member:`~Py_buffer.shape` is *NULL* as a result of a :c:macro:`PyBUF_SIMPLE`
or a :c:macro:`PyBUF_WRITABLE` request, the consumer must disregard
:c:member:`~Py_buffer.itemsize` and assume ``itemsize == 1``.

@@ -156,7 +156,7 @@ a buffer, see :c:func:`PyObject_GetBuffer`.
.. c:member:: int ndim

The number of dimensions the memory represents as an n-dimensional array.
-If it is 0, :c:member:`~Py_Buffer.buf` points to a single item representing
+If it is 0, :c:member:`~Py_buffer.buf` points to a single item representing
a scalar. In this case, :c:member:`~Py_buffer.shape`, :c:member:`~Py_buffer.strides`
and :c:member:`~Py_buffer.suboffsets` MUST be *NULL*.
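The same buffer metadata is reachable from Python through :class:`memoryview`, which wraps a :c:type:`Py_buffer` internally. A minimal illustrative sketch, assuming a platform where ``unsigned int`` is 4 bytes::

   >>> flat = memoryview(b'\x00' * 12)      # simple, one-dimensional request
   >>> flat.format, flat.itemsize, flat.ndim
   ('B', 1, 1)
   >>> shaped = flat.cast('I', shape=[3])   # reinterpret as 3 unsigned ints
   >>> shaped.format, shaped.itemsize, shaped.shape
   ('I', 4, (3,))
   >>> shaped.shape[0] * shaped.itemsize == shaped.nbytes
   True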
@@ -94,7 +94,7 @@ type objects) *must* have the :attr:`ob_size` field.
This field is not inherited by subtypes.


-.. c:member:: char* PyTypeObject.tp_name
+.. c:member:: const char* PyTypeObject.tp_name

Pointer to a NUL-terminated string containing the name of the type. For types
that are accessible as module globals, the string should be the full module

@@ -372,7 +372,7 @@ type objects) *must* have the :attr:`ob_size` field.
inherited individually.


-.. c:member:: long PyTypeObject.tp_flags
+.. c:member:: unsigned long PyTypeObject.tp_flags

This field is a bit mask of various flags. Some flags indicate variant
semantics for certain situations; others are used to indicate that certain

@@ -472,7 +472,7 @@ type objects) *must* have the :attr:`ob_size` field.
.. versionadded:: 3.4


-.. c:member:: char* PyTypeObject.tp_doc
+.. c:member:: const char* PyTypeObject.tp_doc

An optional pointer to a NUL-terminated C string giving the docstring for this
type object. This is exposed as the :attr:`__doc__` attribute on the type and

@@ -619,7 +619,7 @@ type objects) *must* have the :attr:`ob_size` field.
+----------------+------------+


-.. c:member:: long PyTypeObject.tp_weaklistoffset
+.. c:member:: Py_ssize_t PyTypeObject.tp_weaklistoffset

If the instances of this type are weakly referenceable, this field is greater
than zero and contains the offset in the instance structure of the weak

@@ -786,7 +786,7 @@ type objects) *must* have the :attr:`ob_size` field.
.. XXX explain.


-.. c:member:: long PyTypeObject.tp_dictoffset
+.. c:member:: Py_ssize_t PyTypeObject.tp_dictoffset

If the instances of this type have a dictionary containing instance variables,
this field is non-zero and contains the offset in the instances of the type of
@@ -893,20 +893,20 @@ fields in the right order! It's often easiest to find an example that includes
all the fields you need (even if they're initialized to ``0``) and then change
the values to suit your new type. ::

-char *tp_name; /* For printing */
+const char *tp_name; /* For printing */

The name of the type - as mentioned in the last section, this will appear in
various places, almost entirely for diagnostic purposes. Try to choose something
that will be helpful in such a situation! ::

-int tp_basicsize, tp_itemsize; /* For allocation */
+Py_ssize_t tp_basicsize, tp_itemsize; /* For allocation */

These fields tell the runtime how much memory to allocate when new objects of
this type are created. Python has some built-in support for variable length
structures (think: strings, lists) which is where the :c:member:`~PyTypeObject.tp_itemsize` field
comes in. This will be dealt with later. ::

-char *tp_doc;
+const char *tp_doc;

Here you can put a string (or its address) that you want returned when the
Python script references ``obj.__doc__`` to retrieve the doc string.
@@ -1164,6 +1164,8 @@ analogue of lisp car is ``lisp_list[0]`` and the analogue of cdr is
usually a lot slower than using Python lists.


+.. _faq-multidimensional-list:

How do I create a multidimensional list?
----------------------------------------
@@ -1,7 +1,7 @@
typedef struct _typeobject {
PyObject_VAR_HEAD
-char *tp_name; /* For printing, in format "<module>.<name>" */
-int tp_basicsize, tp_itemsize; /* For allocation */
+const char *tp_name; /* For printing, in format "<module>.<name>" */
+Py_ssize_t tp_basicsize, tp_itemsize; /* For allocation */

/* Methods to implement standard operations */

@@ -9,7 +9,8 @@ typedef struct _typeobject {
printfunc tp_print;
getattrfunc tp_getattr;
setattrfunc tp_setattr;
-PyAsyncMethods *tp_as_async;
+PyAsyncMethods *tp_as_async; /* formerly known as tp_compare (Python 2)
+or tp_reserved (Python 3) */
reprfunc tp_repr;

/* Method suites for standard classes */

@@ -30,9 +31,9 @@ typedef struct _typeobject {
PyBufferProcs *tp_as_buffer;

/* Flags to define presence of optional/expanded features */
-long tp_flags;
+unsigned long tp_flags;

-char *tp_doc; /* Documentation string */
+const char *tp_doc; /* Documentation string */

/* call function for all accessible objects */
traverseproc tp_traverse;

@@ -44,7 +45,7 @@ typedef struct _typeobject {
richcmpfunc tp_richcompare;

/* weak reference enabler */
-long tp_weaklistoffset;
+Py_ssize_t tp_weaklistoffset;

/* Iterators */
getiterfunc tp_iter;

@@ -58,7 +59,7 @@ typedef struct _typeobject {
PyObject *tp_dict;
descrgetfunc tp_descr_get;
descrsetfunc tp_descr_set;
-long tp_dictoffset;
+Py_ssize_t tp_dictoffset;
initproc tp_init;
allocfunc tp_alloc;
newfunc tp_new;

@@ -69,7 +70,6 @@ typedef struct _typeobject {
PyObject *tp_cache;
PyObject *tp_subclasses;
PyObject *tp_weaklist;
-
destructor tp_del;

/* Type attribute cache version tag. Added in version 2.6 */
@@ -93,7 +93,8 @@ It defines the following constants and functions:
Return the thread stack size used when creating new threads. The optional
*size* argument specifies the stack size to be used for subsequently created
threads, and must be 0 (use platform or configured default) or a positive
-integer value of at least 32,768 (32 KiB). If changing the thread stack size is
+integer value of at least 32,768 (32 KiB). If *size* is not specified,
+0 is used. If changing the thread stack size is
unsupported, a :exc:`RuntimeError` is raised. If the specified stack size is
invalid, a :exc:`ValueError` is raised and the stack size is unmodified. 32 KiB
is currently the minimum supported stack size value to guarantee sufficient
@@ -6,7 +6,7 @@ Base Event Loop
===============

The event loop is the central execution device provided by :mod:`asyncio`.
-It provides multiple facilities, amongst which:
+It provides multiple facilities, including:

* Registering, executing and cancelling delayed calls (timeouts).
@@ -303,7 +303,7 @@ Process
.. _asyncio-subprocess-threads:

Subprocess and threads
-======================
+----------------------

asyncio supports running subprocesses from different threads, but there
are limits:

@@ -322,10 +322,10 @@ The :class:`asyncio.subprocess.Process` class is not thread safe.


Subprocess examples
-===================
+-------------------

Subprocess using transport and protocol
----------------------------------------
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Example of a subprocess protocol using to get the output of a subprocess and to
wait for the subprocess exit. The subprocess is created by the

@@ -381,7 +381,7 @@ wait for the subprocess exit. The subprocess is created by the


Subprocess using streams
-------------------------
+^^^^^^^^^^^^^^^^^^^^^^^^

Example using the :class:`~asyncio.subprocess.Process` class to control the
subprocess and the :class:`StreamReader` class to read from the standard
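For orientation, a minimal sketch of the streams-based approach named above, written in the asyncio style of this era (``@asyncio.coroutine`` and ``yield from``); the command being run is only an assumption::

   import asyncio

   @asyncio.coroutine
   def read_listing():
       proc = yield from asyncio.create_subprocess_exec(
           'ls', stdout=asyncio.subprocess.PIPE)
       data = yield from proc.stdout.read()   # proc.stdout is a StreamReader
       yield from proc.wait()
       return data

   loop = asyncio.get_event_loop()
   print(loop.run_until_complete(read_listing()).decode())
   loop.close()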
@@ -4,6 +4,13 @@
.. module:: asyncio
:synopsis: Asynchronous I/O, event loop, coroutines and tasks.

+.. note::

+The asyncio package has been included in the standard library on a
+:term:`provisional basis <provisional package>`. Backwards incompatible
+changes (up to and including removal of the module) may occur if deemed
+necessary by the core developers.

.. versionadded:: 3.4

**Source code:** :source:`Lib/asyncio/`
@@ -842,10 +842,10 @@ field names, the method and attribute names start with an underscore.
.. method:: somenamedtuple._asdict()

Return a new :class:`OrderedDict` which maps field names to their corresponding
-values. Note, this method is no longer needed now that the same effect can
-be achieved by using the built-in :func:`vars` function::
+values::

->>> vars(p)
+>>> p = Point(x=11, y=22)
+>>> p._asdict()
OrderedDict([('x', 11), ('y', 22)])

.. versionchanged:: 3.1
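A short sketch of the documented behaviour on Python 3.5, where :meth:`somenamedtuple._asdict` builds an :class:`OrderedDict` from the fields::

   >>> from collections import namedtuple, OrderedDict
   >>> Point = namedtuple('Point', ['x', 'y'])
   >>> p = Point(x=11, y=22)
   >>> p._asdict()
   OrderedDict([('x', 11), ('y', 22)])
   >>> isinstance(p._asdict(), OrderedDict)
   True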
@@ -946,8 +946,8 @@ the more significant byte last.
Creates a new function object, sets its *__closure__* slot, and pushes it on
the stack. TOS is the :term:`qualified name` of the function, TOS1 is the
code associated with the function, and TOS2 is the tuple containing cells for
-the closure's free variables. The function also has *argc* default
-parameters, which are found below the cells.
+the closure's free variables. *argc* is interpreted as in ``MAKE_FUNCTION``;
+the annotations and defaults are also in the same order below TOS2.


.. opcode:: BUILD_SLICE (argc)
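On interpreters of this vintage (3.4/3.5) a simple closure compiles to ``MAKE_CLOSURE``, which can be inspected with :mod:`dis`; the function used here is only an illustration and the exact listing is version-dependent::

   import dis

   def outer(x):
       def inner(y):
           return x + y
       return inner

   # The listing for ``outer`` includes LOAD_CLOSURE, BUILD_TUPLE,
   # two LOAD_CONST operations (code object and qualified name) and
   # MAKE_CLOSURE on these interpreter versions.
   dis.dis(outer)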
@@ -185,7 +185,7 @@ implementations do nothing (except for :meth:`~HTMLParser.handle_startendtag`):

The content of Internet Explorer conditional comments (condcoms) will also be
sent to this method, so, for ``<!--[if IE 9]>IE9-specific content<![endif]-->``,
-this method will receive ``'[if IE 9]>IE-specific content<![endif]'``.
+this method will receive ``'[if IE 9]>IE9-specific content<![endif]'``.


.. method:: HTMLParser.handle_decl(decl)
@@ -178,6 +178,10 @@ attributes:
+-----------+-----------------+---------------------------+
| | gi_code | code |
+-----------+-----------------+---------------------------+
+| | gi_yieldfrom | object being iterated by |
+| | | ``yield from``, or |
+| | | ``None`` |
++-----------+-----------------+---------------------------+
| coroutine | __name__ | name |
+-----------+-----------------+---------------------------+
| | __qualname__ | qualified name |

@@ -191,10 +195,6 @@ attributes:
+-----------+-----------------+---------------------------+
| | cr_code | code |
+-----------+-----------------+---------------------------+
-| | gi_yieldfrom | object being iterated by |
-| | | ``yield from``, or |
-| | | ``None`` |
-+-----------+-----------------+---------------------------+
| builtin | __doc__ | documentation string |
+-----------+-----------------+---------------------------+
| | __name__ | original name of this |

@@ -209,9 +209,10 @@ attributes:

.. versionchanged:: 3.5

-Add ``__qualname__`` attribute to generators. The ``__name__`` attribute of
-generators is now set from the function name, instead of the code name, and
-it can now be modified.
+Add ``__qualname__`` and ``gi_yieldfrom`` attributes to generators.

+The ``__name__`` attribute of generators is now set from the function
+name, instead of the code name, and it can now be modified.


.. function:: getmembers(object[, predicate])
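The new ``gi_yieldfrom`` attribute can be observed on a suspended generator that is delegating with ``yield from``; a sketch for Python 3.5::

   >>> def inner():
   ...     yield 1
   ...     yield 2
   >>> def outer():
   ...     yield from inner()
   >>> g = outer()
   >>> next(g)
   1
   >>> g.__qualname__
   'outer'
   >>> g.gi_yieldfrom      # doctest: +ELLIPSIS
   <generator object inner at 0x...>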
@@ -575,7 +575,7 @@ so to avoid duplication they are only documented for :class:`IPv4Network`.
single-address network, with the network address being *address* and
the mask being ``/128``.

-3. An integer packed into a :class:`bytes` object of length 16, bit-endian.
+3. An integer packed into a :class:`bytes` object of length 16, big-endian.
The interpretation is similar to an integer *address*.

4. A two-tuple of an address description and a netmask, where the address
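The packed big-endian form round-trips through :class:`bytes`; a small sketch::

   >>> import ipaddress
   >>> addr = ipaddress.IPv6Address('2001:db8::1')
   >>> len(addr.packed)                      # 16 bytes, big-endian
   16
   >>> ipaddress.IPv6Address(addr.packed) == addr
   True
   >>> ipaddress.IPv6Network(addr.packed)    # single-address network
   IPv6Network('2001:db8::1/128')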
@@ -82,8 +82,21 @@ the :mod:`glob` module.)

Return the longest path prefix (taken character-by-character) that is a
prefix of all paths in *list*. If *list* is empty, return the empty string
-(``''``). Note that this may return invalid paths because it works a
-character at a time. To obtain a valid path, see :func:`commonpath`.
+(``''``).

+.. note::

+This function may return invalid paths because it works a
+character at a time. To obtain a valid path, see
+:func:`commonpath`.

+::

+>>> os.path.commonprefix(['/usr/lib', '/usr/local/lib'])
+'/usr/l'

+>>> os.path.commonpath(['/usr/lib', '/usr/local/lib'])
+'/usr'


.. function:: dirname(path)
@@ -854,8 +854,8 @@ operations have the same priority as the corresponding numeric operations.
| ``s + t`` | the concatenation of *s* and | (6)(7) |
| | *t* | |
+--------------------------+--------------------------------+----------+
-| ``s * n`` or | *n* shallow copies of *s* | (2)(7) |
-| ``n * s`` | concatenated | |
+| ``s * n`` or | equivalent to adding *s* to | (2)(7) |
+| ``n * s`` | itself *n* times | |
+--------------------------+--------------------------------+----------+
| ``s[i]`` | *i*\ th item of *s*, origin 0 | \(3) |
+--------------------------+--------------------------------+----------+

@@ -897,9 +897,9 @@ Notes:

(2)
Values of *n* less than ``0`` are treated as ``0`` (which yields an empty
-sequence of the same type as *s*). Note also that the copies are shallow;
-nested structures are not copied. This often haunts new Python programmers;
-consider::
+sequence of the same type as *s*). Note that items in the sequence *s*
+are not copied; they are referenced multiple times. This often haunts
+new Python programmers; consider::

>>> lists = [[]] * 3
>>> lists

@@ -909,7 +909,7 @@ Notes:
[[3], [3], [3]]

What has happened is that ``[[]]`` is a one-element list containing an empty
-list, so all three elements of ``[[]] * 3`` are (pointers to) this single empty
+list, so all three elements of ``[[]] * 3`` are references to this single empty
list. Modifying any of the elements of ``lists`` modifies this single list.
You can create a list of different lists this way::

@@ -920,6 +920,9 @@ Notes:
>>> lists
[[3], [5], [7]]

+Further explanation is available in the FAQ entry
+:ref:`faq-multidimensional-list`.

(3)
If *i* or *j* is negative, the index is relative to the end of the string:
``len(s) + i`` or ``len(s) + j`` is substituted. But note that ``-0`` is
@@ -1068,7 +1068,7 @@ Return code handling translates as follows::
if rc is not None and rc >> 8:
print("There were some errors")
==>
-process = Popen(cmd, 'w', stdin=PIPE)
+process = Popen(cmd, stdin=PIPE)
...
process.stdin.close()
if process.wait() != 0:
@@ -16,16 +16,18 @@

--------------

-This module generates temporary files and directories. It works on all
-supported platforms. It provides three new functions,
-:func:`NamedTemporaryFile`, :func:`mkstemp`, and :func:`mkdtemp`, which should
-eliminate all remaining need to use the insecure :func:`mktemp` function.
-Temporary file names created by this module no longer contain the process ID;
-instead a string of six random characters is used.
+This module creates temporary files and directories. It works on all
+supported platforms. :class:`TemporaryFile`, :class:`NamedTemporaryFile`,
+:class:`TemporaryDirectory`, and :class:`SpooledTemporaryFile` are high-level
+interfaces which provide automatic cleanup and can be used as
+context managers. :func:`mkstemp` and
+:func:`mkdtemp` are lower-level functions which require manual cleanup.

-Also, all the user-callable functions now take additional arguments which
-allow direct control over the location and name of temporary files. It is
-no longer necessary to use the global *tempdir* variable.
+All the user-callable functions and constructors take additional arguments which
+allow direct control over the location and name of temporary files and
+directories. Files names used by this module include a string of
+random characters which allows those files to be securely created in
+shared temporary directories.
To maintain backward compatibility, the argument order is somewhat odd; it
is recommended to use keyword arguments for clarity.

@@ -34,28 +36,33 @@ The module defines the following user-callable items:
.. function:: TemporaryFile(mode='w+b', buffering=None, encoding=None, newline=None, suffix='', prefix='tmp', dir=None)

Return a :term:`file-like object` that can be used as a temporary storage area.
-The file is created using :func:`mkstemp`. It will be destroyed as soon
+The file is created securely, using the same rules as :func:`mkstemp`. It will be destroyed as soon
as it is closed (including an implicit close when the object is garbage
-collected). Under Unix, the directory entry for the file is removed
+collected). Under Unix, the directory entry for the file is either not created at all or is removed
immediately after the file is created. Other platforms do not support
this; your code should not rely on a temporary file created using this
function having or not having a visible name in the file system.

+The resulting object can be used as a context manager (see
+:ref:`tempfile-examples`). On completion of the context or
+destruction of the file object the temporary file will be removed
+from the filesystem.

The *mode* parameter defaults to ``'w+b'`` so that the file created can
be read and written without being closed. Binary mode is used so that it
behaves consistently on all platforms without regard for the data that is
stored. *buffering*, *encoding* and *newline* are interpreted as for
:func:`open`.

-The *dir*, *prefix* and *suffix* parameters are passed to :func:`mkstemp`.
+The *dir*, *prefix* and *suffix* parameters have the same meaning
+as with :func:`mkstemp`.

The returned object is a true file object on POSIX platforms. On other
platforms, it is a file-like object whose :attr:`!file` attribute is the
-underlying true file object. This file-like object can be used in a
-:keyword:`with` statement, just like a normal file.
+underlying true file object.

The :py:data:`os.O_TMPFILE` flag is used if it is available and works
-(Linux-specific, require Linux kernel 3.11 or later).
+(Linux-specific, requires Linux kernel 3.11 or later).

.. versionchanged:: 3.5

@@ -101,10 +108,9 @@ The module defines the following user-callable items:

.. function:: TemporaryDirectory(suffix='', prefix='tmp', dir=None)

-This function creates a temporary directory using :func:`mkdtemp`
-(the supplied arguments are passed directly to the underlying function).
+This function securely creates a temporary directory using the same rules as :func:`mkdtemp`.
The resulting object can be used as a context manager (see
-:ref:`context-managers`). On completion of the context or destruction
+:ref:`tempfile-examples`). On completion of the context or destruction
of the temporary directory object the newly created temporary directory
and all its contents are removed from the filesystem.

@@ -194,49 +200,14 @@ The module defines the following user-callable items:
an appropriate default value to be used.


-.. function:: mktemp(suffix='', prefix='tmp', dir=None)
+.. function:: gettempdir()

-.. deprecated:: 2.3
-Use :func:`mkstemp` instead.
+Return the name of the directory used for temporary files. This
+defines the default value for the *dir* argument to all functions
+in this module.

-Return an absolute pathname of a file that did not exist at the time the
-call is made. The *prefix*, *suffix*, and *dir* arguments are the same
-as for :func:`mkstemp`.

-.. warning::

-Use of this function may introduce a security hole in your program. By
-the time you get around to doing anything with the file name it returns,
-someone else may have beaten you to the punch. :func:`mktemp` usage can
-be replaced easily with :func:`NamedTemporaryFile`, passing it the
-``delete=False`` parameter::

->>> f = NamedTemporaryFile(delete=False)
->>> f.name
-'/tmp/tmptjujjt'
->>> f.write(b"Hello World!\n")
-13
->>> f.close()
->>> os.unlink(f.name)
->>> os.path.exists(f.name)
-False

-The module uses a global variable that tell it how to construct a
-temporary name. They are initialized at the first call to any of the
-functions above. The caller may change them, but this is discouraged; use
-the appropriate function arguments, instead.


-.. data:: tempdir

-When set to a value other than ``None``, this variable defines the
-default value for the *dir* argument to all the functions defined in this
-module.

-If ``tempdir`` is unset or ``None`` at any call to any of the above
-functions, Python searches a standard list of directories and sets
-*tempdir* to the first one which the calling user can create files in.
-The list is:
+Python searches a standard list of directories to find one which
+the calling user can create files in. The list is:

#. The directory named by the :envvar:`TMPDIR` environment variable.

@@ -254,12 +225,8 @@ the appropriate function arguments, instead.

#. As a last resort, the current working directory.


-.. function:: gettempdir()

-Return the directory currently selected to create temporary files in. If
-:data:`tempdir` is not ``None``, this simply returns its contents; otherwise,
-the search described above is performed, and the result returned.
+The result of this search is cached, see the description of
+:data:`tempdir` below.

.. function:: gettempdirb()

@@ -278,6 +245,23 @@ the appropriate function arguments, instead.

.. versionadded:: 3.5

+The module uses a global variable to store the name of the directory
+used for temporary files returned by :func:`gettempdir`. It can be
+set directly to override the selection process, but this is discouraged.
+All functions in this module take a *dir* argument which can be used
+to specify the directory and this is the recommend approach.

+.. data:: tempdir

+When set to a value other than ``None``, this variable defines the
+default value for the *dir* argument to all the functions defined in this
+module.

+If ``tempdir`` is unset or ``None`` at any call to any of the above
+functions except :func:`gettempprefix` it is initalized following the
+algorithm described in :func:`gettempdir`.

.. _tempfile-examples:

Examples
--------

@@ -311,3 +295,42 @@ Here are some examples of typical usage of the :mod:`tempfile` module::
>>>
# directory and contents have been removed


+Deprecated functions and variables
+----------------------------------

+A historical way to create temporary files was to first generate a
+file name with the :func:`mktemp` function and then create a file
+using this name. Unfortunately this is not secure, because a different
+process may create a file with this name in the time between the call
+to :func:`mktemp` and the subsequent attempt to create the file by the
+first process. The solution is to combine the two steps and create the
+file immediately. This approach is used by :func:`mkstemp` and the
+other functions described above.

+.. function:: mktemp(suffix='', prefix='tmp', dir=None)

+.. deprecated:: 2.3
+Use :func:`mkstemp` instead.

+Return an absolute pathname of a file that did not exist at the time the
+call is made. The *prefix*, *suffix*, and *dir* arguments are the same
+as for :func:`mkstemp`.

+.. warning::

+Use of this function may introduce a security hole in your program. By
+the time you get around to doing anything with the file name it returns,
+someone else may have beaten you to the punch. :func:`mktemp` usage can
+be replaced easily with :func:`NamedTemporaryFile`, passing it the
+``delete=False`` parameter::

+>>> f = NamedTemporaryFile(delete=False)
+>>> f.name
+'/tmp/tmptjujjt'
+>>> f.write(b"Hello World!\n")
+13
+>>> f.close()
+>>> os.unlink(f.name)
+>>> os.path.exists(f.name)
+False
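A sketch contrasting the high-level and lower-level interfaces described above, assuming a POSIX-like platform::

   import os
   import tempfile

   # High-level: cleanup happens automatically when the context exits.
   with tempfile.TemporaryFile() as tmp:
       tmp.write(b'hello')
       tmp.seek(0)
       assert tmp.read() == b'hello'

   with tempfile.TemporaryDirectory() as tmpdir:
       path = os.path.join(tmpdir, 'scratch.txt')
       with open(path, 'w') as f:
           f.write('data')
   # tmpdir and its contents no longer exist here.

   # Lower-level: mkstemp() returns an OS-level handle and a path;
   # closing and removing the file is the caller's responsibility.
   fd, name = tempfile.mkstemp(suffix='.log')
   try:
       os.write(fd, b'manual cleanup required')
   finally:
       os.close(fd)
       os.remove(name)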
@@ -89,7 +89,8 @@ This module defines the following functions:
Return the thread stack size used when creating new threads. The optional
*size* argument specifies the stack size to be used for subsequently created
threads, and must be 0 (use platform or configured default) or a positive
-integer value of at least 32,768 (32 KiB). If changing the thread stack size is
+integer value of at least 32,768 (32 KiB). If *size* is not specified,
+0 is used. If changing the thread stack size is
unsupported, a :exc:`RuntimeError` is raised. If the specified stack size is
invalid, a :exc:`ValueError` is raised and the stack size is unmodified. 32 KiB
is currently the minimum supported stack size value to guarantee sufficient
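A sketch of the call as documented; whether a given size is accepted is platform-dependent and may raise :exc:`RuntimeError` or :exc:`ValueError` as described above::

   import threading

   previous = threading.stack_size()        # 0 means the platform default
   threading.stack_size(256 * 1024)         # applies to threads created next
   t = threading.Thread(target=lambda: None)
   t.start()
   t.join()
   threading.stack_size(previous)           # restore the earlier setting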
@@ -20,8 +20,9 @@ The function below takes and returns a string and is annotated as follows::
def greeting(name: str) -> str:
return 'Hello ' + name

-In the function `greeting`, the argument `name` is expected to by of type `str`
-and the return type `str`. Subtypes are accepted as arguments.
+In the function ``greeting``, the argument ``name`` is expected to by of type
+:class:`str` and the return type :class:`str`. Subtypes are accepted as
+arguments.

Type aliases
------------

@@ -49,8 +50,8 @@ For example::

It is possible to declare the return type of a callable without specifying
the call signature by substituting a literal ellipsis
-for the list of arguments in the type hint: `Callable[..., ReturnType]`.
-`None` as a type hint is a special case and is replaced by `type(None)`.
+for the list of arguments in the type hint: ``Callable[..., ReturnType]``.
+``None`` as a type hint is a special case and is replaced by ``type(None)``.

Generics
--------

@@ -108,11 +109,12 @@ A user-defined class can be defined as a generic class.
def log(self, message: str) -> None:
self.logger.info('{}: {}'.format(self.name, message))

-`Generic[T]` as a base class defines that the class `LoggedVar` takes a single
-type parameter `T` . This also makes `T` valid as a type within the class body.
+``Generic[T]`` as a base class defines that the class ``LoggedVar`` takes a
+single type parameter ``T`` . This also makes ``T`` valid as a type within the
+class body.

-The `Generic` base class uses a metaclass that defines `__getitem__` so that
-`LoggedVar[t]` is valid as a type::
+The :class:`Generic` base class uses a metaclass that defines
+:meth:`__getitem__` so that ``LoggedVar[t]`` is valid as a type::

from typing import Iterable

@@ -132,7 +134,7 @@ be constrained::
class StrangePair(Generic[T, S]):
...

-Each type variable argument to `Generic` must be distinct.
+Each type variable argument to :class:`Generic` must be distinct.
This is thus invalid::

from typing import TypeVar, Generic

@@ -152,9 +154,9 @@ You can use multiple inheritance with `Generic`::
class LinkedList(Sized, Generic[T]):
...

-Subclassing a generic class without specifying type parameters assumes `Any`
-for each position. In the following example, `MyIterable` is not generic but
-implicitly inherits from `Iterable[Any]`::
+Subclassing a generic class without specifying type parameters assumes
+:class:`Any` for each position. In the following example, ``MyIterable`` is
+not generic but implicitly inherits from ``Iterable[Any]``::

from typing import Iterable

@@ -162,24 +164,24 @@ implicitly inherits from `Iterable[Any]`::

Generic metaclasses are not supported.

-The `Any` type
---------------
+The :class:`Any` type
+---------------------

-A special kind of type is `Any`. Every type is a subtype of `Any`.
-This is also true for the builtin type object. However, to the static type
-checker these are completely different.
+A special kind of type is :class:`Any`. Every type is a subtype of
+:class:`Any`. This is also true for the builtin type object. However, to the
+static type checker these are completely different.

-When the type of a value is `object`, the type checker will reject almost all
-operations on it, and assigning it to a variable (or using it as a return value)
-of a more specialized type is a type error. On the other hand, when a value has
-type `Any`, the type checker will allow all operations on it, and a value of
-type `Any` can be assigned to a variable (or used as a return value) of a more
-constrained type.
+When the type of a value is :class:`object`, the type checker will reject
+almost all operations on it, and assigning it to a variable (or using it as a
+return value) of a more specialized type is a type error. On the other hand,
+when a value has type :class:`Any`, the type checker will allow all operations
+on it, and a value of type :class:`Any` can be assigned to a variable (or used
+as a return value) of a more constrained type.

Default argument values
-----------------------

-Use a literal ellipsis `...` to declare an argument as having a default value::
+Use a literal ellipsis ``...`` to declare an argument as having a default value::

from typing import AnyStr

@@ -195,9 +197,10 @@ The module defines the following classes, functions and decorators:

Special type indicating an unconstrained type.

-* Any object is an instance of `Any`.
-* Any class is a subclass of `Any`.
-* As a special case, `Any` and `object` are subclasses of each other.
+* Any object is an instance of :class:`Any`.
+* Any class is a subclass of :class:`Any`.
+* As a special case, :class:`Any` and :class:`object` are subclasses of
+each other.

.. class:: TypeVar

@@ -224,22 +227,22 @@ The module defines the following classes, functions and decorators:
return x if len(x) >= len(y) else y

The latter example's signature is essentially the overloading
-of `(str, str) -> str` and `(bytes, bytes) -> bytes`. Also note
-that if the arguments are instances of some subclass of `str`,
-the return type is still plain `str`.
+of ``(str, str) -> str`` and ``(bytes, bytes) -> bytes``. Also note
+that if the arguments are instances of some subclass of :class:`str`,
+the return type is still plain :class:`str`.

-At runtime, `isinstance(x, T)` will raise `TypeError`. In general,
-`isinstance` and `issublass` should not be used with types.
+At runtime, ``isinstance(x, T)`` will raise :exc:`TypeError`. In general,
+:func:`isinstance` and :func:`issublass` should not be used with types.

Type variables may be marked covariant or contravariant by passing
-`covariant=True` or `contravariant=True`. See :pep:`484` for more
+``covariant=True`` or ``contravariant=True``. See :pep:`484` for more
details. By default type variables are invariant.

.. class:: Union

-Union type; `Union[X, Y]` means either X or Y.
+Union type; ``Union[X, Y]`` means either X or Y.

-To define a union, use e.g. `Union[int, str]`. Details:
+To define a union, use e.g. ``Union[int, str]``. Details:

* The arguments must be types and there must be at least one.

@@ -259,37 +262,37 @@ The module defines the following classes, functions and decorators:

Union[int, str] == Union[str, int]

-* If `Any` is present it is the sole survivor, e.g.::
+* If :class:`Any` is present it is the sole survivor, e.g.::

Union[int, Any] == Any

* You cannot subclass or instantiate a union.

-* You cannot write `Union[X][Y]`
+* You cannot write ``Union[X][Y]``

-* You can use `Optional[X]` as a shorthand for `Union[X, None]`.
+* You can use ``Optional[X]`` as a shorthand for ``Union[X, None]``.

.. class:: Optional

Optional type.

-`Optional[X]` is equivalent to `Union[X, type(None)]`.
+``Optional[X]`` is equivalent to ``Union[X, type(None)]``.

.. class:: Tuple

-Tuple type; `Tuple[X, Y]` is the is the type of a tuple of two items
+Tuple type; ``Tuple[X, Y]`` is the is the type of a tuple of two items
with the first item of type X and the second of type Y.

-Example: `Tuple[T1, T2]` is a tuple of two elements corresponding
-to type variables T1 and T2. `Tuple[int, float, str]` is a tuple
+Example: ``Tuple[T1, T2]`` is a tuple of two elements corresponding
+to type variables T1 and T2. ``Tuple[int, float, str]`` is a tuple
of an int, a float and a string.

To specify a variable-length tuple of homogeneous type,
-use literal ellipsis, e.g. `Tuple[int, ...]`.
+use literal ellipsis, e.g. ``Tuple[int, ...]``.

.. class:: Callable

-Callable type; `Callable[[int], str]` is a function of (int) -> str.
+Callable type; ``Callable[[int], str]`` is a function of (int) -> str.

The subscription syntax must always be used with exactly two
values: the argument list and the return type. The argument list

@@ -297,9 +300,9 @@ The module defines the following classes, functions and decorators:

There is no syntax to indicate optional or keyword arguments,
such function types are rarely used as callback types.
-`Callable[..., ReturnType]` could be used to type hint a callable
-taking any number of arguments and returning `ReturnType`.
-A plain `Callable` is equivalent to `Callable[..., Any]`.
+``Callable[..., ReturnType]`` could be used to type hint a callable
+taking any number of arguments and returning ``ReturnType``.
+A plain :class:`Callable` is equivalent to ``Callable[..., Any]``.

.. class:: Generic
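A compact sketch pulling together the constructs discussed above, valid for the provisional :mod:`typing` module of Python 3.5::

   from typing import Callable, Generic, Optional, TypeVar, Union

   T = TypeVar('T')

   class Box(Generic[T]):
       def __init__(self, content: T) -> None:
           self.content = content

       def get(self) -> T:
           return self.content

   Number = Union[int, float]
   Handler = Callable[[int, str], bool]

   def first(items: list) -> Optional[int]:
       # Optional[int] is shorthand for Union[int, None]
       return items[0] if items else None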
@@ -278,8 +278,8 @@ The :option:`-s`, :option:`-p`, and :option:`-t` options can be passed in
as positional arguments in that order. The following two command lines
are equivalent::

-python -m unittest discover -s project_directory -p '*_test.py'
-python -m unittest discover project_directory '*_test.py'
+python -m unittest discover -s project_directory -p "*_test.py"
+python -m unittest discover project_directory "*_test.py"

As well as being a path it is possible to pass a package name, for example
``myproject.subpackage.test``, as the start directory. The package name you
@@ -651,21 +651,29 @@ Element Objects


.. attribute:: text
+tail

-The *text* attribute can be used to hold additional data associated with
-the element. As the name implies this attribute is usually a string but
-may be any application-specific object. If the element is created from
-an XML file the attribute will contain any text found between the element
-tags.
+These attributes can be used to hold additional data associated with
+the element. Their values are usually strings but may be any
+application-specific object. If the element is created from
+an XML file, the *text* attribute holds either the text between
+the element's start tag and its first child or end tag, or ``None``, and
+the *tail* attribute holds either the text between the element's
+end tag and the next tag, or ``None``. For the XML data

+.. code-block:: xml

-.. attribute:: tail
+<a><b>1<c>2<d/>3</c></b>4</a>

-The *tail* attribute can be used to hold additional data associated with
-the element. This attribute is usually a string but may be any
-application-specific object. If the element is created from an XML file
-the attribute will contain any text found after the element's end tag and
-before the next tag.
+the *a* element has ``None`` for both *text* and *tail* attributes,
+the *b* element has *text* ``"1"`` and *tail* ``"4"``,
+the *c* element has *text* ``"2"`` and *tail* ``None``,
+and the *d* element has *text* ``None`` and *tail* ``"3"``.

+To collect the inner text of an element, see :meth:`itertext`, for
+example ``"".join(element.itertext())``.

+Applications may store arbitrary objects in these attributes.


.. attribute:: attrib
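The ``<a><b>1<c>2<d/>3</c></b>4</a>`` example above can be reproduced directly with :func:`~xml.etree.ElementTree.fromstring`::

   >>> import xml.etree.ElementTree as ET
   >>> a = ET.fromstring('<a><b>1<c>2<d/>3</c></b>4</a>')
   >>> b = a[0]; c = b[0]; d = c[0]
   >>> a.text, a.tail
   (None, None)
   >>> b.text, b.tail
   ('1', '4')
   >>> c.text, c.tail
   ('2', None)
   >>> d.text, d.tail
   (None, '3')
   >>> ''.join(a.itertext())
   '1234'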
@@ -288,6 +288,6 @@ library/xml.etree.elementtree,332,:character,"for char in actor.findall('role:ch
library/zipapp,31,:main,"$ python -m zipapp myapp -m ""myapp:main"""
library/zipapp,82,:fn,"argument should have the form ""pkg.mod:fn"", where ""pkg.mod"" is a"
library/zipapp,155,:callable,"""pkg.module:callable"" and the archive will be run by importing"
-library/stdtypes,3767,::,>>> m[::2].tolist()
+library/stdtypes,,::,>>> m[::2].tolist()
library/sys,1115,`,# ``wrapper`` creates a ``wrap(coro)`` coroutine:
tutorial/venv,77,:c7b9645a6f35,"Python 3.4.3+ (3.4:c7b9645a6f35+, May 22 2015, 09:31:25)"
@@ -612,18 +612,18 @@ returns a new sorted list while leaving the source unaltered. ::
orange
pear

-To change a sequence you are iterating over while inside the loop (for
-example to duplicate certain items), it is recommended that you first make
-a copy. Looping over a sequence does not implicitly make a copy. The slice
-notation makes this especially convenient::
+It is sometimes tempting to change a list while you are looping over it;
+however, it is often simpler and safer to create a new list instead. ::

->>> words = ['cat', 'window', 'defenestrate']
->>> for w in words[:]: # Loop over a slice copy of the entire list.
-... if len(w) > 6:
-... words.insert(0, w)
+>>> import math
+>>> raw_data = [56.2, float('NaN'), 51.7, 55.3, 52.5, float('NaN'), 47.8]
+>>> filtered_data = []
+>>> for value in raw_data:
+... if not math.isnan(value):
+... filtered_data.append(value)
...
->>> words
-['defenestrate', 'cat', 'window', 'defenestrate']
+>>> filtered_data
+[56.2, 51.7, 55.3, 52.5, 47.8]


.. _tut-conditions:
@@ -102,8 +102,9 @@ Configuration
Python on OS X honors all standard Unix environment variables such as
:envvar:`PYTHONPATH`, but setting these variables for programs started from the
Finder is non-standard as the Finder does not read your :file:`.profile` or
-:file:`.cshrc` at startup. You need to create a file :file:`~
-/.MacOSX/environment.plist`. See Apple's Technical Document QA1067 for details.
+:file:`.cshrc` at startup. You need to create a file
+:file:`~/.MacOSX/environment.plist`. See Apple's Technical Document QA1067 for
+details.

For more information on installation Python packages in MacPython, see section
:ref:`mac-package-manager`.
@@ -351,7 +351,8 @@ typedef struct _typeobject {
printfunc tp_print;
getattrfunc tp_getattr;
setattrfunc tp_setattr;
-PyAsyncMethods *tp_as_async; /* formerly known as tp_compare or tp_reserved */
+PyAsyncMethods *tp_as_async; /* formerly known as tp_compare (Python 2)
+or tp_reserved (Python 3) */
reprfunc tp_repr;

/* Method suites for standard classes */
@@ -8,12 +8,13 @@ import codecs
import errno
import array
import stat
+import sys
# Import _thread instead of threading to reduce startup cost
try:
from _thread import allocate_lock as Lock
except ImportError:
from _dummy_thread import allocate_lock as Lock
-if os.name == 'win32':
+if sys.platform in {'win32', 'cygwin'}:
from msvcrt import setmode as _setmode
else:
_setmode = None
@@ -720,6 +720,11 @@ class FieldStorage:
self.bytes_read += len(hdr_text)
parser.feed(hdr_text.decode(self.encoding, self.errors))
headers = parser.close()

+# Some clients add Content-Length for part headers, ignore them
+if 'content-length' in headers:
+del headers['content-length']

part = klass(self.fp, headers, ib, environ, keep_blank_values,
strict_parsing,self.limit-self.bytes_read,
self.encoding, self.errors)
@@ -320,23 +320,14 @@ class {typename}(tuple):
'Return a nicely formatted representation string'
return self.__class__.__name__ + '({repr_fmt})' % self

-@property
-def __dict__(self):
-'A new OrderedDict mapping field names to their values'
-return OrderedDict(zip(self._fields, self))

def _asdict(self):
'Return a new OrderedDict which maps field names to their values.'
-return self.__dict__
+return OrderedDict(zip(self._fields, self))

def __getnewargs__(self):
'Return self as a plain tuple. Used by copy and pickle.'
return tuple(self)

-def __getstate__(self):
-'Exclude the OrderedDict from pickling'
-return None

{field_defs}
"""
@@ -263,12 +263,9 @@ class InterpolationMissingOptionError(InterpolationError):
"""A string substitution required a setting which was not available."""

def __init__(self, option, section, rawval, reference):
-msg = ("Bad value substitution:\n"
-"\tsection: [%s]\n"
-"\toption : %s\n"
-"\tkey : %s\n"
-"\trawval : %s\n"
-% (section, option, reference, rawval))
+msg = ("Bad value substitution: option {!r} in section {!r} contains "
+"an interpolation key {!r} which is not a valid option name. "
+"Raw value: {!r}".format(option, section, reference, rawval))
InterpolationError.__init__(self, option, section, msg)
self.reference = reference
self.args = (option, section, rawval, reference)

@@ -286,11 +283,11 @@ class InterpolationDepthError(InterpolationError):
"""Raised when substitutions are nested too deeply."""

def __init__(self, option, section, rawval):
-msg = ("Value interpolation too deeply recursive:\n"
-"\tsection: [%s]\n"
-"\toption : %s\n"
-"\trawval : %s\n"
-% (section, option, rawval))
+msg = ("Recursion limit exceeded in value substitution: option {!r} "
+"in section {!r} contains an interpolation key which "
+"cannot be substituted in {} steps. Raw value: {!r}"
+"".format(option, section, MAX_INTERPOLATION_DEPTH,
+rawval))
InterpolationError.__init__(self, option, section, msg)
self.args = (option, section, rawval)

@@ -406,8 +403,9 @@ class BasicInterpolation(Interpolation):

def _interpolate_some(self, parser, option, accum, rest, section, map,
depth):
+rawval = parser.get(section, option, raw=True, fallback=rest)
if depth > MAX_INTERPOLATION_DEPTH:
-raise InterpolationDepthError(option, section, rest)
+raise InterpolationDepthError(option, section, rawval)
while rest:
p = rest.find("%")
if p < 0:

@@ -432,7 +430,7 @@ class BasicInterpolation(Interpolation):
v = map[var]
except KeyError:
raise InterpolationMissingOptionError(
-option, section, rest, var) from None
+option, section, rawval, var) from None
if "%" in v:
self._interpolate_some(parser, option, accum, v,
section, map, depth + 1)

@@ -466,8 +464,9 @@ class ExtendedInterpolation(Interpolation):

def _interpolate_some(self, parser, option, accum, rest, section, map,
depth):
+rawval = parser.get(section, option, raw=True, fallback=rest)
if depth > MAX_INTERPOLATION_DEPTH:
-raise InterpolationDepthError(option, section, rest)
+raise InterpolationDepthError(option, section, rawval)
while rest:
p = rest.find("$")
if p < 0:

@@ -504,7 +503,7 @@ class ExtendedInterpolation(Interpolation):
"More than one ':' found: %r" % (rest,))
except (KeyError, NoSectionError, NoOptionError):
raise InterpolationMissingOptionError(
-option, section, rest, ":".join(path)) from None
+option, section, rawval, ":".join(path)) from None
if "$" in v:
self._interpolate_some(parser, opt, accum, v, sect,
dict(parser.items(sect, raw=True)),
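A sketch of how the reworded exception surfaces to users; the option, section and interpolation key reported follow the format strings above::

   import configparser

   parser = configparser.ConfigParser()
   parser.read_string("[paths]\nhome = /home/user\nlog = %(hom)s/log\n")
   try:
       parser.get('paths', 'log')    # 'hom' is a typo, not a valid option
   except configparser.InterpolationMissingOptionError as err:
       print(err.section, err.option, err.reference)   # paths log hom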
@@ -567,7 +567,7 @@ def _c3_merge(sequences):
break # reject the current head, it appears later
else:
break
-if not candidate:
+if candidate is None:
raise RuntimeError("Inconsistent hierarchy")
result.append(candidate)
# remove the chosen candidate
@@ -139,7 +139,15 @@ class HTMLParser(_markupbase.ParserBase):
if self.convert_charrefs and not self.cdata_elem:
j = rawdata.find('<', i)
if j < 0:
if not end:
+# if we can't find the next <, either we are at the end
+# or there's more text incoming. If the latter is True,
+# we can't pass the text to handle_data in case we have
+# a charref cut in half at end. Try to determine if
+# this is the case before proceding by looking for an
+# & near the end and see if it's followed by a space or ;.
+amppos = rawdata.rfind('&', max(i, n-34))
+if (amppos >= 0 and
+not re.compile(r'[\s;]').search(rawdata, amppos)):
+break # wait till we get all the text
j = n
else:
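The situation the added branch guards against is a character reference split across two ``feed()`` calls; a sketch using the public API (``convert_charrefs=True`` is the default from Python 3.5 on)::

   from html.parser import HTMLParser

   class Collector(HTMLParser):
       def __init__(self):
           super().__init__(convert_charrefs=True)
           self.chunks = []

       def handle_data(self, data):
           self.chunks.append(data)

   p = Collector()
   p.feed('<p>fish &amp')     # '&amp' may be an incomplete reference,
   p.feed('; chips</p>')      # so the text is held until more data arrives
   p.close()
   print(''.join(p.chunks))   # fish & chips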
@@ -1167,8 +1167,7 @@ def test(HandlerClass=BaseHTTPRequestHandler,
ServerClass=HTTPServer, protocol="HTTP/1.0", port=8000, bind=""):
"""Test the HTTP request handler class.

-This runs an HTTP server on port 8000 (or the first command line
-argument).
+This runs an HTTP server on port 8000 (or the port argument).

"""
server_address = (bind, port)
@@ -2,6 +2,19 @@ What's New in IDLE 3.5.0?
=========================
*Release date: 2015-09-13* ??

+- Issue #23672: Allow Idle to edit and run files with astral chars in name.
+Patch by Mohd Sanad Zaki Rizvi.

+- Issue 24745: Idle editor default font. Switch from Courier to
+platform-sensitive TkFixedFont. This should not affect current customized
+font selections. If there is a problem, edit $HOME/.idlerc/config-main.cfg
+and remove 'fontxxx' entries from [Editor Window]. Patch by Mark Roseman.

+- Issue #21192: Idle editor. When a file is run, put its name in the restart bar.
+Do not print false prompts. Original patch by Adnan Umer.

- Issue #13884: Idle menus. Remove tearoff lines. Patch by Roger Serwy.

- Issue #23184: remove unused names and imports in idlelib.
Initial patch by Al Sweigart.
@@ -1043,6 +1043,7 @@ class PyShell(OutputWindow):

self.write("Python %s on %s\n%s\n%s" %
(sys.version, sys.platform, self.COPYRIGHT, nosub))
+self.text.focus_force()
self.showprompt()
import tkinter
tkinter._default_root = None # 03Jan04 KBK What's this?
@@ -69,7 +69,7 @@ class ScriptBinding:
try:
tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
except tokenize.TokenError as msg:
-msgtxt, (lineno, start) = msg
+msgtxt, (lineno, start) = msg.args
self.editwin.gotoline(lineno)
self.errorbox("Tabnanny Tokenizing Error",
"Token Error: %s" % msgtxt)
@@ -10,8 +10,7 @@ from idlelib.PyShell import PyShellFileList

def StackBrowser(root, flist=None, tb=None, top=None):
if top is None:
-from tkinter import Toplevel
-top = Toplevel(root)
+top = tk.Toplevel(root)
sc = ScrolledCanvas(top, bg="white", highlightthickness=0)
sc.frame.pack(expand=1, fill="both")
item = StackTreeItem(flist, tb)

@@ -108,12 +107,9 @@ class VariablesTreeItem(ObjectTreeItem):
def IsExpandable(self):
return len(self.object) > 0

-def keys(self):
-return list(self.object.keys())

def GetSubList(self):
sublist = []
-for key in self.keys():
+for key in self.object.keys():
try:
value = self.object[key]
except KeyError:

@@ -124,6 +120,9 @@ class VariablesTreeItem(ObjectTreeItem):
sublist.append(item)
return sublist

+def keys(self): # unused, left for possible 3rd party use
+return list(self.object.keys())

def _stack_viewer(parent):
root = tk.Tk()
root.title("Test StackViewer")
@@ -1201,9 +1201,6 @@ class VerticalScrolledFrame(Frame):
# update the scrollbars to match the size of the inner frame
size = (interior.winfo_reqwidth(), interior.winfo_reqheight())
canvas.config(scrollregion="0 0 %s %s" % size)
-if interior.winfo_reqwidth() != canvas.winfo_width():
-# update the canvas's width to fit the inner frame
-canvas.config(width=interior.winfo_reqwidth())
interior.bind('<Configure>', _configure_interior)

def _configure_canvas(event):

@@ -1323,38 +1320,56 @@ class ConfigExtensionsDialog(Toplevel):

def create_widgets(self):
"""Create the dialog's widgets."""
+self.extension_names = StringVar(self)
+self.rowconfigure(0, weight=1)
+self.rowconfigure(1, weight=0)
+self.columnconfigure(0, weight=1)
+self.columnconfigure(2, weight=1)
+self.extension_list = Listbox(self, listvariable=self.extension_names,
+selectmode='browse')
+self.extension_list.bind('<<ListboxSelect>>', self.extension_selected)
+scroll = Scrollbar(self, command=self.extension_list.yview)
+self.extension_list.yscrollcommand=scroll.set
+self.details_frame = LabelFrame(self, width=250, height=250)
+self.extension_list.grid(column=0, row=0, sticky='nws')
+scroll.grid(column=1, row=0, sticky='ns')
+self.details_frame.grid(column=2, row=0, sticky='nsew', padx=[10, 0])
+self.configure(padx=10, pady=10)
+self.config_frame = {}
+self.current_extension = None

-# create the tabbed pages
-self.tabbed_page_set = TabbedPageSet(
-self, page_names=self.extensions.keys(),
-n_rows=None, max_tabs_per_row=5,
-page_class=TabbedPageSet.PageRemove)
-self.tabbed_page_set.grid(row=0, column=0, sticky=NSEW)
-for ext_name in self.extensions:
-self.create_tab_page(ext_name)
+self.outerframe = self # TEMPORARY
+self.tabbed_page_set = self.extension_list # TEMPORARY

-self.create_action_buttons().grid(row=1)
+# create the individual pages
+ext_names = ''
+for ext_name in sorted(self.extensions):
+self.create_extension_frame(ext_name)
+ext_names = ext_names + '{' + ext_name + '} '
+self.extension_names.set(ext_names)
+self.extension_list.selection_set(0)
+self.extension_selected(None)
+self.create_action_buttons().grid(row=1, columnspan=3)

+def extension_selected(self, event):
+newsel = self.extension_list.curselection()
+if newsel:
+newsel = self.extension_list.get(newsel)
+if newsel is None or newsel != self.current_extension:
+if self.current_extension:
+self.details_frame.config(text='')
+self.config_frame[self.current_extension].grid_forget()
+self.current_extension = None
+if newsel:
+self.details_frame.config(text=newsel)
+self.config_frame[newsel].grid(column=0, row=0, sticky='nsew')
+self.current_extension = newsel

create_action_buttons = ConfigDialog.create_action_buttons

-def create_tab_page(self, ext_name):
-"""Create the page for an extension."""

-page = LabelFrame(self.tabbed_page_set.pages[ext_name].frame,
-border=2, padx=2, relief=GROOVE,
-text=' %s ' % ext_name)
-page.pack(fill=BOTH, expand=True, padx=12, pady=2)

-# create the scrollable frame which will contain the entries
-scrolled_frame = VerticalScrolledFrame(page, pady=2, height=250)
-scrolled_frame.pack(side=BOTTOM, fill=BOTH, expand=TRUE)
-entry_area = scrolled_frame.interior
-entry_area.columnconfigure(0, weight=0)
-entry_area.columnconfigure(1, weight=1)

+def create_extension_frame(self, ext_name):
+"""Create a frame holding the widgets to configure one extension"""
+f = VerticalScrolledFrame(self.details_frame, height=250, width=250)
+self.config_frame[ext_name] = f
+entry_area = f.interior
# create an entry for each configuration option
for row, opt in enumerate(self.extensions[ext_name]):
# create a row with a label and entry/checkbutton

@@ -1365,15 +1380,15 @@ class ConfigExtensionsDialog(Toplevel):
Checkbutton(entry_area, textvariable=var, variable=var,
onvalue='True', offvalue='False',
indicatoron=FALSE, selectcolor='', width=8
-).grid(row=row, column=1, sticky=W, padx=7)
+).grid(row=row, column=1, sticky=W, padx=7)
elif opt['type'] == 'int':
Entry(entry_area, textvariable=var, validate='key',
-validatecommand=(self.is_int, '%P')
-).grid(row=row, column=1, sticky=NSEW, padx=7)
+validatecommand=(self.is_int, '%P')
+).grid(row=row, column=1, sticky=NSEW, padx=7)

else:
Entry(entry_area, textvariable=var
-).grid(row=row, column=1, sticky=NSEW, padx=7)
+).grid(row=row, column=1, sticky=NSEW, padx=7)
return
@ -1669,6 +1669,9 @@ def main():
|
|||
# In most cases SystemExit does not warrant a post-mortem session.
|
||||
print("The program exited via sys.exit(). Exit status:", end=' ')
|
||||
print(sys.exc_info()[1])
|
||||
except SyntaxError:
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
except:
|
||||
traceback.print_exc()
|
||||
print("Uncaught exception. Entering post mortem debugging")
|
||||
|
|
|
@ -326,6 +326,24 @@ Content-Type: text/plain
|
|||
got = getattr(files[x], k)
|
||||
self.assertEqual(got, exp)
|
||||
|
||||
def test_fieldstorage_part_content_length(self):
|
||||
BOUNDARY = "JfISa01"
|
||||
POSTDATA = """--JfISa01
|
||||
Content-Disposition: form-data; name="submit-name"
|
||||
Content-Length: 5
|
||||
|
||||
Larry
|
||||
--JfISa01"""
|
||||
env = {
|
||||
'REQUEST_METHOD': 'POST',
|
||||
'CONTENT_TYPE': 'multipart/form-data; boundary={}'.format(BOUNDARY),
|
||||
'CONTENT_LENGTH': str(len(POSTDATA))}
|
||||
fp = BytesIO(POSTDATA.encode('latin-1'))
|
||||
fs = cgi.FieldStorage(fp, environ=env, encoding="latin-1")
|
||||
self.assertEqual(len(fs.list), 1)
|
||||
self.assertEqual(fs.list[0].name, 'submit-name')
|
||||
self.assertEqual(fs.list[0].value, 'Larry')
|
||||
|
||||
def test_fieldstorage_as_context_manager(self):
|
||||
fp = BytesIO(b'x' * 10)
|
||||
env = {'REQUEST_METHOD': 'PUT'}
|
||||
|
|
|
@ -257,7 +257,6 @@ class TestNamedTuple(unittest.TestCase):
|
|||
self.assertEqual(p._fields, ('x', 'y')) # test _fields attribute
|
||||
self.assertEqual(p._replace(x=1), (1, 22)) # test _replace method
|
||||
self.assertEqual(p._asdict(), dict(x=11, y=22)) # test _asdict method
|
||||
self.assertEqual(vars(p), p._asdict()) # verify that vars() works
|
||||
|
||||
try:
|
||||
p._replace(x=1, error=2)
|
||||
|
@ -412,6 +411,17 @@ class TestNamedTuple(unittest.TestCase):
|
|||
globals().pop('NTColor', None) # clean-up after this test
|
||||
|
||||
|
||||
def test_namedtuple_subclass_issue_24931(self):
|
||||
class Point(namedtuple('_Point', ['x', 'y'])):
|
||||
pass
|
||||
|
||||
a = Point(3, 4)
|
||||
self.assertEqual(a._asdict(), OrderedDict([('x', 3), ('y', 4)]))
|
||||
|
||||
a.w = 5
|
||||
self.assertEqual(a.__dict__, {'w': 5})
|
||||
|
||||
|
||||
################################################################################
|
||||
### Abstract Base Classes
|
||||
################################################################################
|
||||
|
|
|
@ -847,7 +847,8 @@ class ConfigParserTestCase(BasicTestCase, unittest.TestCase):
|
|||
"something with lots of interpolation (10 steps)")
|
||||
e = self.get_error(cf, configparser.InterpolationDepthError, "Foo", "bar11")
|
||||
if self.interpolation == configparser._UNSET:
|
||||
self.assertEqual(e.args, ("bar11", "Foo", "%(with1)s"))
|
||||
self.assertEqual(e.args, ("bar11", "Foo",
|
||||
"something %(with11)s lots of interpolation (11 steps)"))
|
||||
elif isinstance(self.interpolation, configparser.LegacyInterpolation):
|
||||
self.assertEqual(e.args, ("bar11", "Foo",
|
||||
"something %(with11)s lots of interpolation (11 steps)"))
|
||||
|
@ -861,7 +862,7 @@ class ConfigParserTestCase(BasicTestCase, unittest.TestCase):
|
|||
self.assertEqual(e.option, "name")
|
||||
if self.interpolation == configparser._UNSET:
|
||||
self.assertEqual(e.args, ('name', 'Interpolation Error',
|
||||
'', 'reference'))
|
||||
'%(reference)s', 'reference'))
|
||||
elif isinstance(self.interpolation, configparser.LegacyInterpolation):
|
||||
self.assertEqual(e.args, ('name', 'Interpolation Error',
|
||||
'%(reference)s', 'reference'))
|
||||
|
@ -1177,7 +1178,7 @@ class ConfigParserTestCaseExtendedInterpolation(BasicTestCase, unittest.TestCase
|
|||
with self.assertRaises(exception_class) as cm:
|
||||
cf['interpolated']['$trying']
|
||||
self.assertEqual(cm.exception.reference, 'dollars:${sick')
|
||||
self.assertEqual(cm.exception.args[2], '}') #rawval
|
||||
self.assertEqual(cm.exception.args[2], '${dollars:${sick}}') #rawval
|
||||
|
||||
def test_case_sensitivity_basic(self):
|
||||
ini = textwrap.dedent("""
|
||||
|
|
|
@ -1491,6 +1491,24 @@ class TestSingleDispatch(unittest.TestCase):
|
|||
many_abcs = [c.Mapping, c.Sized, c.Callable, c.Container, c.Iterable]
|
||||
self.assertEqual(mro(X, abcs=many_abcs), expected)
|
||||
|
||||
def test_false_meta(self):
|
||||
# see issue23572
|
||||
class MetaA(type):
|
||||
def __len__(self):
|
||||
return 0
|
||||
class A(metaclass=MetaA):
|
||||
pass
|
||||
class AA(A):
|
||||
pass
|
||||
@functools.singledispatch
|
||||
def fun(a):
|
||||
return 'base A'
|
||||
@fun.register(A)
|
||||
def _(a):
|
||||
return 'fun A'
|
||||
aa = AA()
|
||||
self.assertEqual(fun(aa), 'fun A')
|
||||
|
||||
def test_mro_conflicts(self):
|
||||
c = collections
|
||||
@functools.singledispatch
@@ -21,19 +21,34 @@ except ImportError:
from test import support
from test.support import run_unittest, findfile, python_is_optimized

try:
    gdb_version, _ = subprocess.Popen(["gdb", "-nx", "--version"],
                                      stdout=subprocess.PIPE).communicate()
except OSError:
    # This is what "no gdb" looks like.  There may, however, be other
    # errors that manifest this way too.
    raise unittest.SkipTest("Couldn't find gdb on the path")
gdb_version_number = re.search(b"^GNU gdb [^\d]*(\d+)\.(\d)", gdb_version)
gdb_major_version = int(gdb_version_number.group(1))
gdb_minor_version = int(gdb_version_number.group(2))
def get_gdb_version():
    try:
        proc = subprocess.Popen(["gdb", "-nx", "--version"],
                                stdout=subprocess.PIPE,
                                universal_newlines=True)
        with proc:
            version = proc.communicate()[0]
    except OSError:
        # This is what "no gdb" looks like.  There may, however, be other
        # errors that manifest this way too.
        raise unittest.SkipTest("Couldn't find gdb on the path")

    # Regex to parse:
    # 'GNU gdb (GDB; SUSE Linux Enterprise 12) 7.7\n' -> 7.7
    # 'GNU gdb (GDB) Fedora 7.9.1-17.fc22\n' -> 7.9
    # 'GNU gdb 6.1.1 [FreeBSD]\n' -> 6.1
    # 'GNU gdb (GDB) Fedora (7.5.1-37.fc18)\n' -> 7.5
    match = re.search(r"^GNU gdb.*?\b(\d+)\.(\d)", version)
    if match is None:
        raise Exception("unable to parse GDB version: %r" % version)
    return (version, int(match.group(1)), int(match.group(2)))

gdb_version, gdb_major_version, gdb_minor_version = get_gdb_version()
if gdb_major_version < 7:
    raise unittest.SkipTest("gdb versions before 7.0 didn't support python embedding"
                            " Saw:\n" + gdb_version.decode('ascii', 'replace'))
    raise unittest.SkipTest("gdb versions before 7.0 didn't support python "
                            "embedding. Saw %s.%s:\n%s"
                            % (gdb_major_version, gdb_minor_version,
                               gdb_version))

if not sysconfig.is_python_build():
    raise unittest.SkipTest("test_gdb only works on source builds at the moment.")

@@ -59,9 +74,12 @@ def run_gdb(*args, **env_vars):
    base_cmd = ('gdb', '--batch', '-nx')
    if (gdb_major_version, gdb_minor_version) >= (7, 4):
        base_cmd += ('-iex', 'add-auto-load-safe-path ' + checkout_hook_path)
    out, err = subprocess.Popen(base_cmd + args,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env,
        ).communicate()
    proc = subprocess.Popen(base_cmd + args,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            env=env)
    with proc:
        out, err = proc.communicate()
    return out.decode('utf-8', 'replace'), err.decode('utf-8', 'replace')

# Verify that "gdb" was built with the embedded python support enabled:

@@ -880,8 +898,8 @@ class PyLocalsTests(DebuggerTests):

def test_main():
    if support.verbose:
        print("GDB version:")
        for line in os.fsdecode(gdb_version).splitlines():
        print("GDB version %s.%s:" % (gdb_major_version, gdb_minor_version))
        for line in gdb_version.splitlines():
            print(" " * 4 + line)
    run_unittest(PrettyPrintTests,
                 PyListTests,
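Not part of the commit, but as a quick sanity check the new parsing regex can be run against the sample banners quoted in the comments above (a minimal sketch):

    import re

    for banner in ('GNU gdb (GDB; SUSE Linux Enterprise 12) 7.7\n',
                   'GNU gdb (GDB) Fedora 7.9.1-17.fc22\n',
                   'GNU gdb 6.1.1 [FreeBSD]\n',
                   'GNU gdb (GDB) Fedora (7.5.1-37.fc18)\n'):
        match = re.search(r"^GNU gdb.*?\b(\d+)\.(\d)", banner)
        print(match.group(1), match.group(2))   # 7 7 / 7 9 / 6 1 / 7 5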
@ -242,9 +242,7 @@ class GlobTests(unittest.TestCase):
|
|||
('a', 'bcd', 'EF'), ('a', 'bcd', 'efg')))
|
||||
eq(self.rglob('a', '**', 'bcd'), self.joins(('a', 'bcd')))
|
||||
|
||||
predir = os.path.abspath(os.curdir)
|
||||
try:
|
||||
os.chdir(self.tempdir)
|
||||
with change_cwd(self.tempdir):
|
||||
join = os.path.join
|
||||
eq(glob.glob('**', recursive=True), [join(*i) for i in full])
|
||||
eq(glob.glob(join('**', ''), recursive=True),
|
||||
|
@ -256,8 +254,6 @@ class GlobTests(unittest.TestCase):
|
|||
if can_symlink():
|
||||
expect += [join('sym3', 'EF')]
|
||||
eq(glob.glob(join('**', 'EF'), recursive=True), expect)
|
||||
finally:
|
||||
os.chdir(predir)
|
||||
|
||||
|
||||
@skip_unless_symlink
|
||||
|
|
|
@ -72,9 +72,6 @@ class EventCollectorExtra(EventCollector):
|
|||
|
||||
class EventCollectorCharrefs(EventCollector):
|
||||
|
||||
def get_events(self):
|
||||
return self.events
|
||||
|
||||
def handle_charref(self, data):
|
||||
self.fail('This should never be called with convert_charrefs=True')
|
||||
|
||||
|
@ -633,6 +630,18 @@ text
|
|||
]
|
||||
self._run_check(html, expected)
|
||||
|
||||
def test_convert_charrefs_dropped_text(self):
|
||||
# #23144: make sure that all the events are triggered when
|
||||
# convert_charrefs is True, even if we don't call .close()
|
||||
parser = EventCollector(convert_charrefs=True)
|
||||
# before the fix, bar & baz was missing
|
||||
parser.feed("foo <a>link</a> bar & baz")
|
||||
self.assertEqual(
|
||||
parser.get_events(),
|
||||
[('data', 'foo '), ('starttag', 'a', []), ('data', 'link'),
|
||||
('endtag', 'a'), ('data', ' bar & baz')]
|
||||
)
|
||||
|
||||
|
||||
class AttributesTestCase(TestCaseBase):
|
||||
|
||||
|
|
|
@ -731,7 +731,10 @@ class LargeMmapTests(unittest.TestCase):
|
|||
f.write(tail)
|
||||
f.flush()
|
||||
except (OSError, OverflowError):
|
||||
f.close()
|
||||
try:
|
||||
f.close()
|
||||
except (OSError, OverflowError):
|
||||
pass
|
||||
raise unittest.SkipTest("filesystem does not have largefile support")
|
||||
return f
|
||||
|
||||
|
|
|
@ -1043,6 +1043,18 @@ class PdbTestCase(unittest.TestCase):
|
|||
self.assertNotIn('Error', stdout.decode(),
|
||||
"Got an error running test script under PDB")
|
||||
|
||||
def test_issue16180(self):
|
||||
# A syntax error in the debuggee.
|
||||
script = "def f: pass\n"
|
||||
commands = ''
|
||||
expected = "SyntaxError:"
|
||||
stdout, stderr = self.run_pdb(script, commands)
|
||||
self.assertIn(expected, stdout,
|
||||
'\n\nExpected:\n{}\nGot:\n{}\n'
|
||||
'Fail to handle a syntax error in the debuggee.'
|
||||
.format(expected, stdout))
|
||||
|
||||
|
||||
def tearDown(self):
|
||||
support.unlink(support.TESTFN)
|
||||
|
||||
|
|
|
@ -158,17 +158,11 @@ class UnicodeFileTests(unittest.TestCase):
|
|||
def test_directory(self):
|
||||
dirname = os.path.join(support.TESTFN, 'Gr\xfc\xdf-\u66e8\u66e9\u66eb')
|
||||
filename = '\xdf-\u66e8\u66e9\u66eb'
|
||||
oldwd = os.getcwd()
|
||||
os.mkdir(dirname)
|
||||
os.chdir(dirname)
|
||||
try:
|
||||
with support.temp_cwd(dirname):
|
||||
with open(filename, 'wb') as f:
|
||||
f.write((filename + '\n').encode("utf-8"))
|
||||
os.access(filename,os.R_OK)
|
||||
os.remove(filename)
|
||||
finally:
|
||||
os.chdir(oldwd)
|
||||
os.rmdir(dirname)
|
||||
|
||||
|
||||
class UnicodeNFCFileTests(UnicodeFileTests):
|
||||
|
|
|
@ -316,7 +316,6 @@ class PosixPathTest(unittest.TestCase):
|
|||
# Bug #930024, return the path unchanged if we get into an infinite
|
||||
# symlink loop.
|
||||
try:
|
||||
old_path = abspath('.')
|
||||
os.symlink(ABSTFN, ABSTFN)
|
||||
self.assertEqual(realpath(ABSTFN), ABSTFN)
|
||||
|
||||
|
@ -342,10 +341,9 @@ class PosixPathTest(unittest.TestCase):
|
|||
self.assertEqual(realpath(ABSTFN+"c"), ABSTFN+"c")
|
||||
|
||||
# Test using relative path as well.
|
||||
os.chdir(dirname(ABSTFN))
|
||||
self.assertEqual(realpath(basename(ABSTFN)), ABSTFN)
|
||||
with support.change_cwd(dirname(ABSTFN)):
|
||||
self.assertEqual(realpath(basename(ABSTFN)), ABSTFN)
|
||||
finally:
|
||||
os.chdir(old_path)
|
||||
support.unlink(ABSTFN)
|
||||
support.unlink(ABSTFN+"1")
|
||||
support.unlink(ABSTFN+"2")
|
||||
|
@ -373,7 +371,6 @@ class PosixPathTest(unittest.TestCase):
|
|||
@skip_if_ABSTFN_contains_backslash
|
||||
def test_realpath_deep_recursion(self):
|
||||
depth = 10
|
||||
old_path = abspath('.')
|
||||
try:
|
||||
os.mkdir(ABSTFN)
|
||||
for i in range(depth):
|
||||
|
@ -382,10 +379,9 @@ class PosixPathTest(unittest.TestCase):
|
|||
self.assertEqual(realpath(ABSTFN + '/%d' % depth), ABSTFN)
|
||||
|
||||
# Test using relative path as well.
|
||||
os.chdir(ABSTFN)
|
||||
self.assertEqual(realpath('%d' % depth), ABSTFN)
|
||||
with support.change_cwd(ABSTFN):
|
||||
self.assertEqual(realpath('%d' % depth), ABSTFN)
|
||||
finally:
|
||||
os.chdir(old_path)
|
||||
for i in range(depth + 1):
|
||||
support.unlink(ABSTFN + '/%d' % i)
|
||||
safe_rmdir(ABSTFN)
|
||||
|
@ -399,15 +395,13 @@ class PosixPathTest(unittest.TestCase):
|
|||
# /usr/doc with 'doc' being a symlink to /usr/share/doc. We call
|
||||
# realpath("a"). This should return /usr/share/doc/a/.
|
||||
try:
|
||||
old_path = abspath('.')
|
||||
os.mkdir(ABSTFN)
|
||||
os.mkdir(ABSTFN + "/y")
|
||||
os.symlink(ABSTFN + "/y", ABSTFN + "/k")
|
||||
|
||||
os.chdir(ABSTFN + "/k")
|
||||
self.assertEqual(realpath("a"), ABSTFN + "/y/a")
|
||||
with support.change_cwd(ABSTFN + "/k"):
|
||||
self.assertEqual(realpath("a"), ABSTFN + "/y/a")
|
||||
finally:
|
||||
os.chdir(old_path)
|
||||
support.unlink(ABSTFN + "/k")
|
||||
safe_rmdir(ABSTFN + "/y")
|
||||
safe_rmdir(ABSTFN)
|
||||
|
@ -424,7 +418,6 @@ class PosixPathTest(unittest.TestCase):
|
|||
# and a symbolic link 'link-y' pointing to 'y' in directory 'a',
|
||||
# then realpath("link-y/..") should return 'k', not 'a'.
|
||||
try:
|
||||
old_path = abspath('.')
|
||||
os.mkdir(ABSTFN)
|
||||
os.mkdir(ABSTFN + "/k")
|
||||
os.mkdir(ABSTFN + "/k/y")
|
||||
|
@ -433,11 +426,10 @@ class PosixPathTest(unittest.TestCase):
|
|||
# Absolute path.
|
||||
self.assertEqual(realpath(ABSTFN + "/link-y/.."), ABSTFN + "/k")
|
||||
# Relative path.
|
||||
os.chdir(dirname(ABSTFN))
|
||||
self.assertEqual(realpath(basename(ABSTFN) + "/link-y/.."),
|
||||
ABSTFN + "/k")
|
||||
with support.change_cwd(dirname(ABSTFN)):
|
||||
self.assertEqual(realpath(basename(ABSTFN) + "/link-y/.."),
|
||||
ABSTFN + "/k")
|
||||
finally:
|
||||
os.chdir(old_path)
|
||||
support.unlink(ABSTFN + "/link-y")
|
||||
safe_rmdir(ABSTFN + "/k/y")
|
||||
safe_rmdir(ABSTFN + "/k")
|
||||
|
@ -451,17 +443,14 @@ class PosixPathTest(unittest.TestCase):
|
|||
# must be resolved too.
|
||||
|
||||
try:
|
||||
old_path = abspath('.')
|
||||
os.mkdir(ABSTFN)
|
||||
os.mkdir(ABSTFN + "/k")
|
||||
os.symlink(ABSTFN, ABSTFN + "link")
|
||||
os.chdir(dirname(ABSTFN))
|
||||
|
||||
base = basename(ABSTFN)
|
||||
self.assertEqual(realpath(base + "link"), ABSTFN)
|
||||
self.assertEqual(realpath(base + "link/k"), ABSTFN + "/k")
|
||||
with support.change_cwd(dirname(ABSTFN)):
|
||||
base = basename(ABSTFN)
|
||||
self.assertEqual(realpath(base + "link"), ABSTFN)
|
||||
self.assertEqual(realpath(base + "link/k"), ABSTFN + "/k")
|
||||
finally:
|
||||
os.chdir(old_path)
|
||||
support.unlink(ABSTFN + "link")
|
||||
safe_rmdir(ABSTFN + "/k")
|
||||
safe_rmdir(ABSTFN)
|
||||
|
|
|
@ -63,11 +63,9 @@ class PyCompileTests(unittest.TestCase):
|
|||
self.assertTrue(os.path.exists(self.cache_path))
|
||||
|
||||
def test_cwd(self):
|
||||
cwd = os.getcwd()
|
||||
os.chdir(self.directory)
|
||||
py_compile.compile(os.path.basename(self.source_path),
|
||||
os.path.basename(self.pyc_path))
|
||||
os.chdir(cwd)
|
||||
with support.change_cwd(self.directory):
|
||||
py_compile.compile(os.path.basename(self.source_path),
|
||||
os.path.basename(self.pyc_path))
|
||||
self.assertTrue(os.path.exists(self.pyc_path))
|
||||
self.assertFalse(os.path.exists(self.cache_path))
|
||||
|
||||
|
|
|
@ -12,11 +12,9 @@ import errno
|
|||
import functools
|
||||
import subprocess
|
||||
from contextlib import ExitStack
|
||||
from test import support
|
||||
from test.support import TESTFN
|
||||
from os.path import splitdrive
|
||||
from distutils.spawn import find_executable, spawn
|
||||
from shutil import (_make_tarball, _make_zipfile, make_archive,
|
||||
from shutil import (make_archive,
|
||||
register_archive_format, unregister_archive_format,
|
||||
get_archive_formats, Error, unpack_archive,
|
||||
register_unpack_format, RegistryError,
|
||||
|
@ -94,6 +92,18 @@ def read_file(path, binary=False):
|
|||
with open(path, 'rb' if binary else 'r') as fp:
|
||||
return fp.read()
|
||||
|
||||
def rlistdir(path):
|
||||
res = []
|
||||
for name in sorted(os.listdir(path)):
|
||||
p = os.path.join(path, name)
|
||||
if os.path.isdir(p) and not os.path.islink(p):
|
||||
res.append(name + '/')
|
||||
for n in rlistdir(p):
|
||||
res.append(name + '/' + n)
|
||||
else:
|
||||
res.append(name)
|
||||
return res
|
||||
|
||||
|
||||
class TestShutil(unittest.TestCase):
|
||||
|
||||
|
@ -959,138 +969,105 @@ class TestShutil(unittest.TestCase):
|
|||
@requires_zlib
|
||||
def test_make_tarball(self):
|
||||
# creating something to tar
|
||||
tmpdir = self.mkdtemp()
|
||||
write_file((tmpdir, 'file1'), 'xxx')
|
||||
write_file((tmpdir, 'file2'), 'xxx')
|
||||
os.mkdir(os.path.join(tmpdir, 'sub'))
|
||||
write_file((tmpdir, 'sub', 'file3'), 'xxx')
|
||||
root_dir, base_dir = self._create_files('')
|
||||
|
||||
tmpdir2 = self.mkdtemp()
|
||||
# force shutil to create the directory
|
||||
os.rmdir(tmpdir2)
|
||||
unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0],
|
||||
unittest.skipUnless(splitdrive(root_dir)[0] == splitdrive(tmpdir2)[0],
|
||||
"source and target should be on same drive")
|
||||
|
||||
base_name = os.path.join(tmpdir2, 'archive')
|
||||
|
||||
# working with relative paths to avoid tar warnings
|
||||
old_dir = os.getcwd()
|
||||
os.chdir(tmpdir)
|
||||
try:
|
||||
_make_tarball(splitdrive(base_name)[1], '.')
|
||||
finally:
|
||||
os.chdir(old_dir)
|
||||
tarball = make_archive(splitdrive(base_name)[1], 'gztar', root_dir, '.')
|
||||
|
||||
# check if the compressed tarball was created
|
||||
tarball = base_name + '.tar.gz'
|
||||
self.assertTrue(os.path.exists(tarball))
|
||||
self.assertEqual(tarball, base_name + '.tar.gz')
|
||||
self.assertTrue(os.path.isfile(tarball))
|
||||
self.assertTrue(tarfile.is_tarfile(tarball))
|
||||
with tarfile.open(tarball, 'r:gz') as tf:
|
||||
self.assertCountEqual(tf.getnames(),
|
||||
['.', './sub', './sub2',
|
||||
'./file1', './file2', './sub/file3'])
|
||||
|
||||
# trying an uncompressed one
|
||||
base_name = os.path.join(tmpdir2, 'archive')
|
||||
old_dir = os.getcwd()
|
||||
os.chdir(tmpdir)
|
||||
try:
|
||||
_make_tarball(splitdrive(base_name)[1], '.', compress=None)
|
||||
finally:
|
||||
os.chdir(old_dir)
|
||||
tarball = base_name + '.tar'
|
||||
self.assertTrue(os.path.exists(tarball))
|
||||
tarball = make_archive(splitdrive(base_name)[1], 'tar', root_dir, '.')
|
||||
self.assertEqual(tarball, base_name + '.tar')
|
||||
self.assertTrue(os.path.isfile(tarball))
|
||||
self.assertTrue(tarfile.is_tarfile(tarball))
|
||||
with tarfile.open(tarball, 'r') as tf:
|
||||
self.assertCountEqual(tf.getnames(),
|
||||
['.', './sub', './sub2',
|
||||
'./file1', './file2', './sub/file3'])
|
||||
|
||||
def _tarinfo(self, path):
|
||||
tar = tarfile.open(path)
|
||||
try:
|
||||
with tarfile.open(path) as tar:
|
||||
names = tar.getnames()
|
||||
names.sort()
|
||||
return tuple(names)
|
||||
finally:
|
||||
tar.close()
|
||||
|
||||
def _create_files(self):
|
||||
def _create_files(self, base_dir='dist'):
|
||||
# creating something to tar
|
||||
tmpdir = self.mkdtemp()
|
||||
dist = os.path.join(tmpdir, 'dist')
|
||||
os.mkdir(dist)
|
||||
root_dir = self.mkdtemp()
|
||||
dist = os.path.join(root_dir, base_dir)
|
||||
os.makedirs(dist, exist_ok=True)
|
||||
write_file((dist, 'file1'), 'xxx')
|
||||
write_file((dist, 'file2'), 'xxx')
|
||||
os.mkdir(os.path.join(dist, 'sub'))
|
||||
write_file((dist, 'sub', 'file3'), 'xxx')
|
||||
os.mkdir(os.path.join(dist, 'sub2'))
|
||||
tmpdir2 = self.mkdtemp()
|
||||
base_name = os.path.join(tmpdir2, 'archive')
|
||||
return tmpdir, tmpdir2, base_name
|
||||
if base_dir:
|
||||
write_file((root_dir, 'outer'), 'xxx')
|
||||
return root_dir, base_dir
|
||||
|
||||
@requires_zlib
|
||||
@unittest.skipUnless(find_executable('tar') and find_executable('gzip'),
|
||||
@unittest.skipUnless(find_executable('tar'),
|
||||
'Need the tar command to run')
|
||||
def test_tarfile_vs_tar(self):
|
||||
tmpdir, tmpdir2, base_name = self._create_files()
|
||||
old_dir = os.getcwd()
|
||||
os.chdir(tmpdir)
|
||||
try:
|
||||
_make_tarball(base_name, 'dist')
|
||||
finally:
|
||||
os.chdir(old_dir)
|
||||
root_dir, base_dir = self._create_files()
|
||||
base_name = os.path.join(self.mkdtemp(), 'archive')
|
||||
tarball = make_archive(base_name, 'gztar', root_dir, base_dir)
|
||||
|
||||
# check if the compressed tarball was created
|
||||
tarball = base_name + '.tar.gz'
|
||||
self.assertTrue(os.path.exists(tarball))
|
||||
self.assertEqual(tarball, base_name + '.tar.gz')
|
||||
self.assertTrue(os.path.isfile(tarball))
|
||||
|
||||
# now create another tarball using `tar`
|
||||
tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
|
||||
tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
|
||||
gzip_cmd = ['gzip', '-f9', 'archive2.tar']
|
||||
old_dir = os.getcwd()
|
||||
os.chdir(tmpdir)
|
||||
try:
|
||||
with captured_stdout() as s:
|
||||
spawn(tar_cmd)
|
||||
spawn(gzip_cmd)
|
||||
finally:
|
||||
os.chdir(old_dir)
|
||||
tarball2 = os.path.join(root_dir, 'archive2.tar')
|
||||
tar_cmd = ['tar', '-cf', 'archive2.tar', base_dir]
|
||||
with support.change_cwd(root_dir), captured_stdout():
|
||||
spawn(tar_cmd)
|
||||
|
||||
self.assertTrue(os.path.exists(tarball2))
|
||||
self.assertTrue(os.path.isfile(tarball2))
|
||||
# let's compare both tarballs
|
||||
self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2))
|
||||
|
||||
# trying an uncompressed one
|
||||
base_name = os.path.join(tmpdir2, 'archive')
|
||||
old_dir = os.getcwd()
|
||||
os.chdir(tmpdir)
|
||||
try:
|
||||
_make_tarball(base_name, 'dist', compress=None)
|
||||
finally:
|
||||
os.chdir(old_dir)
|
||||
tarball = base_name + '.tar'
|
||||
self.assertTrue(os.path.exists(tarball))
|
||||
tarball = make_archive(base_name, 'tar', root_dir, base_dir)
|
||||
self.assertEqual(tarball, base_name + '.tar')
|
||||
self.assertTrue(os.path.isfile(tarball))
|
||||
|
||||
# now for a dry_run
|
||||
base_name = os.path.join(tmpdir2, 'archive')
|
||||
old_dir = os.getcwd()
|
||||
os.chdir(tmpdir)
|
||||
try:
|
||||
_make_tarball(base_name, 'dist', compress=None, dry_run=True)
|
||||
finally:
|
||||
os.chdir(old_dir)
|
||||
tarball = base_name + '.tar'
|
||||
self.assertTrue(os.path.exists(tarball))
|
||||
tarball = make_archive(base_name, 'tar', root_dir, base_dir,
|
||||
dry_run=True)
|
||||
self.assertEqual(tarball, base_name + '.tar')
|
||||
self.assertTrue(os.path.isfile(tarball))
|
||||
|
||||
@requires_zlib
|
||||
@unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
|
||||
def test_make_zipfile(self):
|
||||
# creating something to tar
|
||||
tmpdir = self.mkdtemp()
|
||||
write_file((tmpdir, 'file1'), 'xxx')
|
||||
write_file((tmpdir, 'file2'), 'xxx')
|
||||
# creating something to zip
|
||||
root_dir, base_dir = self._create_files()
|
||||
base_name = os.path.join(self.mkdtemp(), 'archive')
|
||||
res = make_archive(base_name, 'zip', root_dir, 'dist')
|
||||
|
||||
tmpdir2 = self.mkdtemp()
|
||||
# force shutil to create the directory
|
||||
os.rmdir(tmpdir2)
|
||||
base_name = os.path.join(tmpdir2, 'archive')
|
||||
_make_zipfile(base_name, tmpdir)
|
||||
|
||||
# check if the compressed tarball was created
|
||||
tarball = base_name + '.zip'
|
||||
self.assertTrue(os.path.exists(tarball))
|
||||
self.assertEqual(res, base_name + '.zip')
|
||||
self.assertTrue(os.path.isfile(res))
|
||||
self.assertTrue(zipfile.is_zipfile(res))
|
||||
with zipfile.ZipFile(res) as zf:
|
||||
self.assertCountEqual(zf.namelist(),
|
||||
['dist/file1', 'dist/file2', 'dist/sub/file3'])
|
||||
|
||||
|
||||
def test_make_archive(self):
|
||||
|
@ -1108,40 +1085,37 @@ class TestShutil(unittest.TestCase):
|
|||
else:
|
||||
group = owner = 'root'
|
||||
|
||||
base_dir, root_dir, base_name = self._create_files()
|
||||
base_name = os.path.join(self.mkdtemp() , 'archive')
|
||||
root_dir, base_dir = self._create_files()
|
||||
base_name = os.path.join(self.mkdtemp(), 'archive')
|
||||
res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner,
|
||||
group=group)
|
||||
self.assertTrue(os.path.exists(res))
|
||||
self.assertTrue(os.path.isfile(res))
|
||||
|
||||
res = make_archive(base_name, 'zip', root_dir, base_dir)
|
||||
self.assertTrue(os.path.exists(res))
|
||||
self.assertTrue(os.path.isfile(res))
|
||||
|
||||
res = make_archive(base_name, 'tar', root_dir, base_dir,
|
||||
owner=owner, group=group)
|
||||
self.assertTrue(os.path.exists(res))
|
||||
self.assertTrue(os.path.isfile(res))
|
||||
|
||||
res = make_archive(base_name, 'tar', root_dir, base_dir,
|
||||
owner='kjhkjhkjg', group='oihohoh')
|
||||
self.assertTrue(os.path.exists(res))
|
||||
self.assertTrue(os.path.isfile(res))
|
||||
|
||||
|
||||
@requires_zlib
|
||||
@unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
|
||||
def test_tarfile_root_owner(self):
|
||||
tmpdir, tmpdir2, base_name = self._create_files()
|
||||
old_dir = os.getcwd()
|
||||
os.chdir(tmpdir)
|
||||
root_dir, base_dir = self._create_files()
|
||||
base_name = os.path.join(self.mkdtemp(), 'archive')
|
||||
group = grp.getgrgid(0)[0]
|
||||
owner = pwd.getpwuid(0)[0]
|
||||
try:
|
||||
archive_name = _make_tarball(base_name, 'dist', compress=None,
|
||||
owner=owner, group=group)
|
||||
finally:
|
||||
os.chdir(old_dir)
|
||||
with support.change_cwd(root_dir):
|
||||
archive_name = make_archive(base_name, 'gztar', root_dir, 'dist',
|
||||
owner=owner, group=group)
|
||||
|
||||
# check if the compressed tarball was created
|
||||
self.assertTrue(os.path.exists(archive_name))
|
||||
self.assertTrue(os.path.isfile(archive_name))
|
||||
|
||||
# now checks the rights
|
||||
archive = tarfile.open(archive_name)
|
||||
|
@ -1198,18 +1172,6 @@ class TestShutil(unittest.TestCase):
|
|||
formats = [name for name, params in get_archive_formats()]
|
||||
self.assertNotIn('xxx', formats)
|
||||
|
||||
def _compare_dirs(self, dir1, dir2):
|
||||
# check that dir1 and dir2 are equivalent,
|
||||
# return the diff
|
||||
diff = []
|
||||
for root, dirs, files in os.walk(dir1):
|
||||
for file_ in files:
|
||||
path = os.path.join(root, file_)
|
||||
target_path = os.path.join(dir2, os.path.split(path)[-1])
|
||||
if not os.path.exists(target_path):
|
||||
diff.append(file_)
|
||||
return diff
|
||||
|
||||
@requires_zlib
|
||||
def test_unpack_archive(self):
|
||||
formats = ['tar', 'gztar', 'zip']
|
||||
|
@ -1218,22 +1180,24 @@ class TestShutil(unittest.TestCase):
|
|||
if LZMA_SUPPORTED:
|
||||
formats.append('xztar')
|
||||
|
||||
root_dir, base_dir = self._create_files()
|
||||
for format in formats:
|
||||
tmpdir = self.mkdtemp()
|
||||
base_dir, root_dir, base_name = self._create_files()
|
||||
tmpdir2 = self.mkdtemp()
|
||||
expected = rlistdir(root_dir)
|
||||
expected.remove('outer')
|
||||
if format == 'zip':
|
||||
expected.remove('dist/sub2/')
|
||||
base_name = os.path.join(self.mkdtemp(), 'archive')
|
||||
filename = make_archive(base_name, format, root_dir, base_dir)
|
||||
|
||||
# let's try to unpack it now
|
||||
tmpdir2 = self.mkdtemp()
|
||||
unpack_archive(filename, tmpdir2)
|
||||
diff = self._compare_dirs(tmpdir, tmpdir2)
|
||||
self.assertEqual(diff, [])
|
||||
self.assertEqual(rlistdir(tmpdir2), expected)
|
||||
|
||||
# and again, this time with the format specified
|
||||
tmpdir3 = self.mkdtemp()
|
||||
unpack_archive(filename, tmpdir3, format=format)
|
||||
diff = self._compare_dirs(tmpdir, tmpdir3)
|
||||
self.assertEqual(diff, [])
|
||||
self.assertEqual(rlistdir(tmpdir3), expected)
|
||||
self.assertRaises(shutil.ReadError, unpack_archive, TESTFN)
|
||||
self.assertRaises(ValueError, unpack_archive, TESTFN, format='xxx')
|
||||
|
||||
|
|
|
@ -317,11 +317,8 @@ class ProcessTestCase(BaseTestCase):
|
|||
# Normalize an expected cwd (for Tru64 support).
|
||||
# We can't use os.path.realpath since it doesn't expand Tru64 {memb}
|
||||
# strings. See bug #1063571.
|
||||
original_cwd = os.getcwd()
|
||||
os.chdir(cwd)
|
||||
cwd = os.getcwd()
|
||||
os.chdir(original_cwd)
|
||||
return cwd
|
||||
with support.change_cwd(cwd):
|
||||
return os.getcwd()
|
||||
|
||||
# For use in the test_cwd* tests below.
|
||||
def _split_python_path(self):
|
||||
|
|
|
@ -6,7 +6,7 @@ import shutil
|
|||
from copy import copy
|
||||
|
||||
from test.support import (run_unittest, TESTFN, unlink, check_warnings,
|
||||
captured_stdout, skip_unless_symlink)
|
||||
captured_stdout, skip_unless_symlink, change_cwd)
|
||||
|
||||
import sysconfig
|
||||
from sysconfig import (get_paths, get_platform, get_config_vars,
|
||||
|
@ -361,12 +361,8 @@ class TestSysConfig(unittest.TestCase):
|
|||
# srcdir should be independent of the current working directory
|
||||
# See Issues #15322, #15364.
|
||||
srcdir = sysconfig.get_config_var('srcdir')
|
||||
cwd = os.getcwd()
|
||||
try:
|
||||
os.chdir('..')
|
||||
with change_cwd(os.pardir):
|
||||
srcdir2 = sysconfig.get_config_var('srcdir')
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
self.assertEqual(srcdir, srcdir2)
|
||||
|
||||
@unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None,
|
||||
|
|
|
@ -1132,10 +1132,8 @@ class WriteTest(WriteTestBase, unittest.TestCase):
|
|||
self.assertEqual(tar.getnames(), [],
|
||||
"added the archive to itself")
|
||||
|
||||
cwd = os.getcwd()
|
||||
os.chdir(TEMPDIR)
|
||||
tar.add(dstname)
|
||||
os.chdir(cwd)
|
||||
with support.change_cwd(TEMPDIR):
|
||||
tar.add(dstname)
|
||||
self.assertEqual(tar.getnames(), [],
|
||||
"added the archive to itself")
|
||||
finally:
|
||||
|
@ -1292,9 +1290,7 @@ class WriteTest(WriteTestBase, unittest.TestCase):
|
|||
|
||||
def test_cwd(self):
|
||||
# Test adding the current working directory.
|
||||
cwd = os.getcwd()
|
||||
os.chdir(TEMPDIR)
|
||||
try:
|
||||
with support.change_cwd(TEMPDIR):
|
||||
tar = tarfile.open(tmpname, self.mode)
|
||||
try:
|
||||
tar.add(".")
|
||||
|
@ -1308,8 +1304,6 @@ class WriteTest(WriteTestBase, unittest.TestCase):
|
|||
self.assertTrue(t.name.startswith("./"), t.name)
|
||||
finally:
|
||||
tar.close()
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
|
||||
def test_open_nonwritable_fileobj(self):
|
||||
for exctype in OSError, EOFError, RuntimeError:
|
||||
|
|
|
@ -5,7 +5,7 @@ import os, glob, time, shutil
|
|||
import unicodedata
|
||||
|
||||
import unittest
|
||||
from test.support import (run_unittest, rmtree,
|
||||
from test.support import (run_unittest, rmtree, change_cwd,
|
||||
TESTFN_ENCODING, TESTFN_UNICODE, TESTFN_UNENCODABLE, create_empty_file)
|
||||
|
||||
if not os.path.supports_unicode_filenames:
|
||||
|
@ -82,13 +82,11 @@ class TestUnicodeFiles(unittest.TestCase):
|
|||
self.assertFalse(os.path.exists(filename2 + '.new'))
|
||||
|
||||
def _do_directory(self, make_name, chdir_name):
|
||||
cwd = os.getcwd()
|
||||
if os.path.isdir(make_name):
|
||||
rmtree(make_name)
|
||||
os.mkdir(make_name)
|
||||
try:
|
||||
os.chdir(chdir_name)
|
||||
try:
|
||||
with change_cwd(chdir_name):
|
||||
cwd_result = os.getcwd()
|
||||
name_result = make_name
|
||||
|
||||
|
@ -96,8 +94,6 @@ class TestUnicodeFiles(unittest.TestCase):
|
|||
name_result = unicodedata.normalize("NFD", name_result)
|
||||
|
||||
self.assertEqual(os.path.basename(cwd_result),name_result)
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
finally:
|
||||
os.rmdir(make_name)
|
||||
|
||||
|
|
|
@ -44,6 +44,7 @@ class BaseTest:
|
|||
"""Basic bookkeeping required for testing."""
|
||||
|
||||
def setUp(self):
|
||||
self.old_unittest_module = unittest.case.warnings
|
||||
# The __warningregistry__ needs to be in a pristine state for tests
|
||||
# to work properly.
|
||||
if '__warningregistry__' in globals():
|
||||
|
@ -55,10 +56,15 @@ class BaseTest:
|
|||
# The 'warnings' module must be explicitly set so that the proper
|
||||
# interaction between _warnings and 'warnings' can be controlled.
|
||||
sys.modules['warnings'] = self.module
|
||||
# Ensure that unittest.TestCase.assertWarns() uses the same warnings
|
||||
# module than warnings.catch_warnings(). Otherwise,
|
||||
# warnings.catch_warnings() will be unable to remove the added filter.
|
||||
unittest.case.warnings = self.module
|
||||
super(BaseTest, self).setUp()
|
||||
|
||||
def tearDown(self):
|
||||
sys.modules['warnings'] = original_warnings
|
||||
unittest.case.warnings = self.old_unittest_module
|
||||
super(BaseTest, self).tearDown()
|
||||
|
||||
class PublicAPITests(BaseTest):
|
||||
|
|
|
@ -1,11 +1,10 @@
|
|||
from __future__ import nested_scopes # Backward compat for 2.1
|
||||
from unittest import TestCase
|
||||
from wsgiref.util import setup_testing_defaults
|
||||
from wsgiref.headers import Headers
|
||||
from wsgiref.handlers import BaseHandler, BaseCGIHandler
|
||||
from wsgiref import util
|
||||
from wsgiref.validate import validator
|
||||
from wsgiref.simple_server import WSGIServer, WSGIRequestHandler, demo_app
|
||||
from wsgiref.simple_server import WSGIServer, WSGIRequestHandler
|
||||
from wsgiref.simple_server import make_server
|
||||
from io import StringIO, BytesIO, BufferedReader
|
||||
from socketserver import BaseServer
|
||||
|
@ -14,8 +13,8 @@ from platform import python_implementation
|
|||
import os
|
||||
import re
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
from test import support
|
||||
|
||||
class MockServer(WSGIServer):
|
||||
"""Non-socket HTTP server"""
|
||||
|
|
|
@ -583,8 +583,11 @@ class TestCase(object):
|
|||
finally:
|
||||
result.stopTest(self)
|
||||
return
|
||||
expecting_failure = getattr(testMethod,
|
||||
"__unittest_expecting_failure__", False)
|
||||
expecting_failure_method = getattr(testMethod,
|
||||
"__unittest_expecting_failure__", False)
|
||||
expecting_failure_class = getattr(self,
|
||||
"__unittest_expecting_failure__", False)
|
||||
expecting_failure = expecting_failure_class or expecting_failure_method
|
||||
outcome = _Outcome(result)
|
||||
try:
|
||||
self._outcome = outcome
|
||||
|
@ -1279,8 +1282,10 @@ class TestCase(object):
|
|||
assert expected_regex, "expected_regex must not be empty."
|
||||
expected_regex = re.compile(expected_regex)
|
||||
if not expected_regex.search(text):
|
||||
msg = msg or "Regex didn't match"
|
||||
msg = '%s: %r not found in %r' % (msg, expected_regex.pattern, text)
|
||||
standardMsg = "Regex didn't match: %r not found in %r" % (
|
||||
expected_regex.pattern, text)
|
||||
# _formatMessage ensures the longMessage option is respected
|
||||
msg = self._formatMessage(msg, standardMsg)
|
||||
raise self.failureException(msg)
|
||||
|
||||
def assertNotRegex(self, text, unexpected_regex, msg=None):
|
||||
|
@ -1289,11 +1294,12 @@ class TestCase(object):
|
|||
unexpected_regex = re.compile(unexpected_regex)
|
||||
match = unexpected_regex.search(text)
|
||||
if match:
|
||||
msg = msg or "Regex matched"
|
||||
msg = '%s: %r matches %r in %r' % (msg,
|
||||
text[match.start():match.end()],
|
||||
unexpected_regex.pattern,
|
||||
text)
|
||||
standardMsg = 'Regex matched: %r matches %r in %r' % (
|
||||
text[match.start() : match.end()],
|
||||
unexpected_regex.pattern,
|
||||
text)
|
||||
# _formatMessage ensures the longMessage option is respected
|
||||
msg = self._formatMessage(msg, standardMsg)
|
||||
raise self.failureException(msg)
|
||||
|
||||
|
||||
|
@ -1315,6 +1321,7 @@ class TestCase(object):
|
|||
failIf = _deprecate(assertFalse)
|
||||
assertRaisesRegexp = _deprecate(assertRaisesRegex)
|
||||
assertRegexpMatches = _deprecate(assertRegex)
|
||||
assertNotRegexpMatches = _deprecate(assertNotRegex)
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -133,7 +133,6 @@ class Test_Assertions(unittest.TestCase):
|
|||
try:
|
||||
self.assertNotRegex('Ala ma kota', r'k.t', 'Message')
|
||||
except self.failureException as e:
|
||||
self.assertIn("'kot'", e.args[0])
|
||||
self.assertIn('Message', e.args[0])
|
||||
else:
|
||||
self.fail('assertNotRegex should have failed.')
|
||||
|
@ -329,6 +328,20 @@ class TestLongMessage(unittest.TestCase):
|
|||
"^unexpectedly identical: None$",
|
||||
"^unexpectedly identical: None : oops$"])
|
||||
|
||||
def testAssertRegex(self):
|
||||
self.assertMessages('assertRegex', ('foo', 'bar'),
|
||||
["^Regex didn't match:",
|
||||
"^oops$",
|
||||
"^Regex didn't match:",
|
||||
"^Regex didn't match: (.*) : oops$"])
|
||||
|
||||
def testAssertNotRegex(self):
|
||||
self.assertMessages('assertNotRegex', ('foo', 'foo'),
|
||||
["^Regex matched:",
|
||||
"^oops$",
|
||||
"^Regex matched:",
|
||||
"^Regex matched: (.*) : oops$"])
|
||||
|
||||
|
||||
def assertMessagesCM(self, methodName, args, func, errors):
|
||||
"""
|
||||
|
|
|
@ -120,6 +120,39 @@ class Test_TestSkipping(unittest.TestCase):
|
|||
self.assertEqual(result.expectedFailures[0][0], test)
|
||||
self.assertTrue(result.wasSuccessful())
|
||||
|
||||
def test_expected_failure_with_wrapped_class(self):
|
||||
@unittest.expectedFailure
|
||||
class Foo(unittest.TestCase):
|
||||
def test_1(self):
|
||||
self.assertTrue(False)
|
||||
|
||||
events = []
|
||||
result = LoggingResult(events)
|
||||
test = Foo("test_1")
|
||||
test.run(result)
|
||||
self.assertEqual(events,
|
||||
['startTest', 'addExpectedFailure', 'stopTest'])
|
||||
self.assertEqual(result.expectedFailures[0][0], test)
|
||||
self.assertTrue(result.wasSuccessful())
|
||||
|
||||
def test_expected_failure_with_wrapped_subclass(self):
|
||||
class Foo(unittest.TestCase):
|
||||
def test_1(self):
|
||||
self.assertTrue(False)
|
||||
|
||||
@unittest.expectedFailure
|
||||
class Bar(Foo):
|
||||
pass
|
||||
|
||||
events = []
|
||||
result = LoggingResult(events)
|
||||
test = Bar("test_1")
|
||||
test.run(result)
|
||||
self.assertEqual(events,
|
||||
['startTest', 'addExpectedFailure', 'stopTest'])
|
||||
self.assertEqual(result.expectedFailures[0][0], test)
|
||||
self.assertTrue(result.wasSuccessful())
|
||||
|
||||
def test_expected_failure_subtests(self):
|
||||
# A failure in any subtest counts as the expected failure of the
|
||||
# whole test.
|
||||
|
|
Misc/ACKS
@@ -103,6 +103,7 @@ Mike Bayer
Samuel L. Bayer
Donald Beaudry
David Beazley
John Beck
Ingolf Becker
Neal Becker
Robin Becker
@@ -622,6 +623,7 @@ Ken Howard
Brad Howes
Mike Hoy
Ben Hoyt
Chiu-Hsiang Hsu
Chih-Hao Huang
Christian Hudon
Lawrence Hudson
@@ -785,6 +787,7 @@ Andrew Kuchling
Dave Kuhlman
Jon Kuhn
Toshio Kuratomi
Ilia Kurenkov
Vladimir Kushnir
Erno Kuusela
Ross Lagerwall
@@ -794,6 +797,7 @@ Thomas Lamb
Valerie Lambert
Jean-Baptiste "Jiba" Lamy
Ronan Lamy
Peter Landry
Torsten Landschoff
Łukasz Langa
Tino Lange
@@ -911,12 +915,14 @@ Nick Mathewson
Simon Mathieu
Laura Matson
Graham Matthews
mattip
Martin Matusiak
Dieter Maurer
Daniel May
Madison May
Lucas Maystre
Arnaud Mazin
Pam McA'Nulty
Matt McClure
Jack McCracken
Rebecca McCreary
@@ -1061,6 +1067,7 @@ Jan Palus
Yongzhi Pan
Martin Panter
Mathias Panzenböck
Marco Paolini
M. Papillon
Peter Parente
Alexandre Parenteau
@@ -1121,6 +1128,7 @@ Martin Pool
Iustin Pop
Claudiu Popa
John Popplewell
Matheus Vieira Portela
Davin Potts
Guillaume Pratte
Florian Preinstorfer
@@ -1182,6 +1190,7 @@ Vlad Riscutia
Wes Rishel
Daniel Riti
Juan M. Bello Rivas
Mohd Sanad Zaki Rizvi
Davide Rizzo
Anthony Roach
Carl Robben
@@ -1514,6 +1523,7 @@ Bob Weiner
Edward Welbourne
Cliff Wells
Rickard Westman
Joseph Weston
Jeff Wheeler
Christopher White
David White
Misc/NEWS
@@ -2,6 +2,84 @@
Python News
+++++++++++

What's New in Python 3.5.1
==========================

Release date: TBA

Core and Builtins
-----------------

Library
-------

- Issue #16180: Exit pdb if file has syntax error, instead of trapping user
  in an infinite loop. Patch by Xavier de Gaye.

- Issue #24891: Fix a race condition at Python startup if the file descriptor
  of stdin (0), stdout (1) or stderr (2) is closed while Python is creating
  sys.stdin, sys.stdout and sys.stderr objects. These attributes are now set
  to None if the creation of the object failed, instead of raising an OSError
  exception. Initial patch written by Marco Paolini.
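Illustration only (not part of the patch): with this change, code that may run with a closed standard stream should guard for the attribute being None, e.g.

    import sys

    # With stderr's file descriptor closed at startup, sys.stderr is now None
    # instead of interpreter startup failing with OSError.
    if sys.stderr is not None:
        print("stderr is usable", file=sys.stderr)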
- Issue #24992: Fix error handling and a race condition (related to garbage
  collection) in collections.OrderedDict constructor.

- Issue #24881: Fixed setting binary mode in Python implementation of FileIO
  on Windows and Cygwin. Patch from Akira Li.

- Issue #21112: Fix regression in unittest.expectedFailure on subclasses.
  Patch from Berker Peksag.
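A standalone sketch of the #21112 fix, condensed from the new test_expected_failure_with_wrapped_subclass test elsewhere in this commit (class names are illustrative, not part of the patch):

    import unittest

    class Foo(unittest.TestCase):
        def test_1(self):
            self.assertTrue(False)       # fails, but the failure is expected

    @unittest.expectedFailure            # decorating a subclass now works too
    class Bar(Foo):
        pass

    result = unittest.TestResult()
    Bar("test_1").run(result)
    print(len(result.expectedFailures))  # 1
    print(result.wasSuccessful())        # True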
- Issue #24764: cgi.FieldStorage.read_multi() now ignores the Content-Length
  header in part headers. Patch written by Peter Landry and reviewed by Pierre
  Quentel.
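A sketch of the multipart case behind #24764, modelled on the new test_fieldstorage_part_content_length test shown earlier in this diff (not part of the patch itself):

    import cgi
    from io import BytesIO

    BOUNDARY = "JfISa01"
    POSTDATA = ("--JfISa01\n"
                'Content-Disposition: form-data; name="submit-name"\n'
                "Content-Length: 5\n"
                "\n"
                "Larry\n"
                "--JfISa01")
    env = {'REQUEST_METHOD': 'POST',
           'CONTENT_TYPE': 'multipart/form-data; boundary={}'.format(BOUNDARY),
           'CONTENT_LENGTH': str(len(POSTDATA))}
    fs = cgi.FieldStorage(BytesIO(POSTDATA.encode('latin-1')),
                          environ=env, encoding="latin-1")
    # the bogus per-part Content-Length header is ignored
    print(fs.list[0].name, fs.list[0].value)   # submit-name Larry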
- Issue #24913: Fix overrun error in deque.index().
  Found by John Leitch and Bryce Darling.
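For reference (not part of the patch), the affected API is the start/stop form of deque.index() added in 3.5:

    from collections import deque

    d = deque(range(10))
    print(d.index(5))          # 5
    print(d.index(5, 2, 8))    # searching a sub-range no longer over-reads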
- Issue #24774: Fix docstring in http.server.test. Patch from Chiu-Hsiang Hsu.

- Issue #21159: Improve message in configparser.InterpolationMissingOptionError.
  Patch from Łukasz Langa.

- Issue #20362: Honour TestCase.longMessage correctly in assertRegex.
  Patch from Ilia Kurenkov.
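A small sketch of what the #20362 change means in practice, paraphrased from the updated unittest tests in this commit (not part of the patch):

    import unittest

    class T(unittest.TestCase):
        def test_regex(self):
            self.assertRegex('foo', 'bar', msg='oops')

    result = unittest.TestResult()
    T('test_regex').run(result)
    # With longMessage (the default) honoured, the failure now reads:
    #   Regex didn't match: 'bar' not found in 'foo' : oops
    print(result.failures[0][1])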
- Issue #23572: Fixed functools.singledispatch on classes with falsy
  metaclasses. Patch by Ethan Furman.
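The #23572 scenario, condensed from the new test_false_meta test earlier in this diff into a standalone sketch (not part of the patch):

    import functools

    class MetaA(type):
        def __len__(self):        # makes the class object itself falsy
            return 0

    class A(metaclass=MetaA):
        pass

    class AA(A):
        pass

    @functools.singledispatch
    def fun(a):
        return 'base A'

    @fun.register(A)
    def _(a):
        return 'fun A'

    print(fun(AA()))   # 'fun A' -- dispatch no longer trips over the falsy class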
Documentation
-------------

- Issue #24952: Clarify the default size argument of stack_size() in
  the "threading" and "_thread" modules. Patch from Mattip.

- Issue #23725: Overhaul tempfile docs. Note deprecated status of mktemp.
  Patch from Zbigniew Jędrzejewski-Szmek.

- Issue #24808: Update the types of some PyTypeObject fields. Patch by
  Joseph Weston.

- Issue #22812: Fix unittest discovery examples.
  Patch from Pam McA'Nulty.

Tests
-----

- PCbuild\rt.bat now accepts an unlimited number of arguments to pass along
  to regrtest.py. Previously there was a limit of 9.

Build
-----

- Issue #24910: Windows MSIs now have unique display names.

- Issue #24986: It is now possible to build Python on Windows without errors
  when external libraries are not available.


What's New in Python 3.5.0 release candidate 3?
===============================================

@@ -21,6 +99,8 @@ Library
-------

- Issue #24917: time_strftime() buffer over-read.

- Issue #23144: Make sure that HTMLParser.feed() returns all the data, even
  when convert_charrefs is True.
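The #23144 behaviour corresponds to the test_convert_charrefs_dropped_text test earlier in this diff; a self-contained sketch (the collector class is illustrative, not part of the patch):

    from html.parser import HTMLParser

    class Collector(HTMLParser):
        def __init__(self):
            super().__init__(convert_charrefs=True)
            self.events = []
        def handle_starttag(self, tag, attrs):
            self.events.append(('starttag', tag, attrs))
        def handle_endtag(self, tag):
            self.events.append(('endtag', tag))
        def handle_data(self, data):
            self.events.append(('data', data))

    p = Collector()
    p.feed("foo <a>link</a> bar &amp; baz")
    # with the fix, the trailing text arrives even though close() is never called
    print(p.events[-1])   # ('data', ' bar & baz')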
- Issue #24748: To resolve a compatibility problem found with py2exe and
  pywin32, imp.load_dynamic() once again ignores previously loaded modules

@@ -30,6 +110,7 @@ Library
- Issue #24635: Fixed a bug in typing.py where isinstance([], typing.Iterable)
  would return True once, then False on subsequent calls.

- Issue #24989: Fixed buffer overread in BytesIO.readline() if a position is
  set beyond size. Based on patch by John Leitch.
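And the #24989 over-read reduces to seeking past the end of a BytesIO buffer before reading (illustration only):

    from io import BytesIO

    b = BytesIO(b"abc")
    b.seek(10)             # position beyond the buffer's size
    print(b.readline())    # b'' -- no data and, with the fix, no over-read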
Misc/Porting
@@ -1,41 +1 @@
Q. I want to port Python to a new platform. How do I begin?

A. I guess the two things to start with is to familiarize yourself
with are the development system for your target platform and the
generic build process for Python. Make sure you can compile and run a
simple hello-world program on your target platform. Make sure you can
compile and run the Python interpreter on a platform to which it has
already been ported (preferably Unix, but Mac or Windows will do,
too).

I also would never start something like this without at least
medium-level understanding of your target platform (i.e. how it is
generally used, how to write platform specific apps etc.) and Python
(or else you'll never know how to test the results).

The build process for Python, in particular the Makefiles in the
source distribution, will give you a hint on which files to compile
for Python. Not all source files are relevant -- some are platform
specific, others are only used in emergencies (e.g. getopt.c). The
Makefiles tell the story.

You'll also need a pyconfig.h file tailored for your platform. You can
start with pyconfig.h.in, read the comments and turn on definitions that
apply to your platform.

And you'll need a config.c file, which lists the built-in modules you
support. Start with Modules/config.c.in.

Finally, you'll run into some things that aren't supported on your
target platform. Forget about the posix module for now -- simply take
it out of the config.c file.

Bang on it until you get a >>> prompt. (You may have to disable the
importing of "site.py" by passing the -S option.)

Then bang on it until it executes very simple Python statements.

Now bang on it some more. At some point you'll want to use the os
module; this is the time to start thinking about what to do with the
posix module. It's okay to simply #ifdef out those functions that
cause problems; the remaining ones will be quite useful.
This document is moved to https://docs.python.org/devguide/faq.html#how-do-i-port-python-to-a-new-platform
@@ -419,9 +419,11 @@ deque_extend(dequeobject *deque, PyObject *iterable)
        deque->rightblock->data[deque->rightindex] = item;
        deque_trim_left(deque);
    }
    Py_DECREF(it);
    if (PyErr_Occurred())
    if (PyErr_Occurred()) {
        Py_DECREF(it);
        return NULL;
    }
    Py_DECREF(it);
    Py_RETURN_NONE;
}

@@ -480,9 +482,11 @@ deque_extendleft(dequeobject *deque, PyObject *iterable)
        deque->leftblock->data[deque->leftindex] = item;
        deque_trim_right(deque);
    }
    Py_DECREF(it);
    if (PyErr_Occurred())
    if (PyErr_Occurred()) {
        Py_DECREF(it);
        return NULL;
    }
    Py_DECREF(it);
    Py_RETURN_NONE;
}

@@ -497,8 +501,8 @@ deque_inplace_concat(dequeobject *deque, PyObject *other)
    result = deque_extend(deque, other);
    if (result == NULL)
        return result;
    Py_DECREF(result);
    Py_INCREF(deque);
    Py_DECREF(result);
    return (PyObject *)deque;
}

@@ -1260,8 +1264,8 @@ deque_repr(PyObject *deque)
                                       aslist, ((dequeobject *)deque)->maxlen);
    else
        result = PyUnicode_FromFormat("deque(%R)", aslist);
    Py_DECREF(aslist);
    Py_ReprLeave(deque);
    Py_DECREF(aslist);
    return result;
}
|
|
@ -43,6 +43,7 @@
|
|||
#ifdef PPRO
|
||||
#if defined(_MSC_VER)
|
||||
#include <float.h>
|
||||
#pragma float_control(precise, on)
|
||||
#pragma fenv_access(on)
|
||||
#elif !defined(__OpenBSD__) && !defined(__NetBSD__)
|
||||
/* C99 */
|
||||
|
|
|
@ -4560,9 +4560,7 @@ typedef struct {
|
|||
} \
|
||||
|
||||
|
||||
#define UTIME_HAVE_DIR_FD (defined(HAVE_FUTIMESAT) || defined(HAVE_UTIMENSAT))
|
||||
|
||||
#if UTIME_HAVE_DIR_FD
|
||||
#if defined(HAVE_FUTIMESAT) || defined(HAVE_UTIMENSAT)
|
||||
|
||||
static int
|
||||
utime_dir_fd(utime_t *ut, int dir_fd, char *path, int follow_symlinks)
|
||||
|
@ -4588,9 +4586,7 @@ utime_dir_fd(utime_t *ut, int dir_fd, char *path, int follow_symlinks)
|
|||
#define FUTIMENSAT_DIR_FD_CONVERTER dir_fd_unavailable
|
||||
#endif
|
||||
|
||||
#define UTIME_HAVE_FD (defined(HAVE_FUTIMES) || defined(HAVE_FUTIMENS))
|
||||
|
||||
#if UTIME_HAVE_FD
|
||||
#if defined(HAVE_FUTIMES) || defined(HAVE_FUTIMENS)
|
||||
|
||||
static int
|
||||
utime_fd(utime_t *ut, int fd)
|
||||
|
@ -4835,13 +4831,13 @@ os_utime_impl(PyModuleDef *module, path_t *path, PyObject *times,
|
|||
else
|
||||
#endif
|
||||
|
||||
#if UTIME_HAVE_DIR_FD
|
||||
#if defined(HAVE_FUTIMESAT) || defined(HAVE_UTIMENSAT)
|
||||
if ((dir_fd != DEFAULT_DIR_FD) || (!follow_symlinks))
|
||||
result = utime_dir_fd(&utime, dir_fd, path->narrow, follow_symlinks);
|
||||
else
|
||||
#endif
|
||||
|
||||
#if UTIME_HAVE_FD
|
||||
#if defined(HAVE_FUTIMES) || defined(HAVE_FUTIMENS)
|
||||
if (path->fd != -1)
|
||||
result = utime_fd(&utime, path->fd);
|
||||
else
|
||||
|
|
|
@ -98,7 +98,6 @@ For removing nodes:
|
|||
|
||||
Others:
|
||||
|
||||
* _odict_initialize(od)
|
||||
* _odict_find_node(od, key)
|
||||
* _odict_keys_equal(od1, od2)
|
||||
|
||||
|
@ -602,15 +601,6 @@ _odict_get_index(PyODictObject *od, PyObject *key)
|
|||
return _odict_get_index_hash(od, key, hash);
|
||||
}
|
||||
|
||||
static int
|
||||
_odict_initialize(PyODictObject *od)
|
||||
{
|
||||
od->od_state = 0;
|
||||
_odict_FIRST(od) = NULL;
|
||||
_odict_LAST(od) = NULL;
|
||||
return _odict_resize((PyODictObject *)od);
|
||||
}
|
||||
|
||||
/* Returns NULL if there was some error or the key was not found. */
|
||||
static _ODictNode *
|
||||
_odict_find_node(PyODictObject *od, PyObject *key)
|
||||
|
@ -744,7 +734,7 @@ _odict_pop_node(PyODictObject *od, _ODictNode *node, PyObject *key)
|
|||
/* If someone calls PyDict_DelItem() directly on an OrderedDict, we'll
|
||||
get all sorts of problems here. In PyODict_DelItem we make sure to
|
||||
call _odict_clear_node first.
|
||||
|
||||
|
||||
This matters in the case of colliding keys. Suppose we add 3 keys:
|
||||
[A, B, C], where the hash of C collides with A and the next possible
|
||||
index in the hash table is occupied by B. If we remove B then for C
|
||||
|
@ -1739,14 +1729,28 @@ odict_init(PyObject *self, PyObject *args, PyObject *kwds)
|
|||
static PyObject *
|
||||
odict_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
PyObject *od = PyDict_Type.tp_new(type, args, kwds);
|
||||
if (od != NULL) {
|
||||
if (_odict_initialize((PyODictObject *)od) < 0)
|
||||
return NULL;
|
||||
((PyODictObject *)od)->od_inst_dict = PyDict_New();
|
||||
((PyODictObject *)od)->od_weakreflist = NULL;
|
||||
PyObject *dict;
|
||||
PyODictObject *od;
|
||||
|
||||
dict = PyDict_New();
|
||||
if (dict == NULL)
|
||||
return NULL;
|
||||
|
||||
od = (PyODictObject *)PyDict_Type.tp_new(type, args, kwds);
|
||||
if (od == NULL) {
|
||||
Py_DECREF(dict);
|
||||
return NULL;
|
||||
}
|
||||
return od;
|
||||
|
||||
od->od_inst_dict = dict;
|
||||
/* type constructor fills the memory with zeros (see
|
||||
PyType_GenericAlloc()), there is no need to set them to zero again */
|
||||
if (_odict_resize(od) < 0) {
|
||||
Py_DECREF(od);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
return (PyObject*)od;
|
||||
}
|
||||
|
||||
/* PyODict_Type */
|
||||
|
|
|
@ -147,7 +147,11 @@ WIN32 is still required for the locale module.
|
|||
#define MS_WINI64
|
||||
#define PYD_PLATFORM_TAG "win_ia64"
|
||||
#elif defined(_M_X64) || defined(_M_AMD64)
|
||||
#if defined(__INTEL_COMPILER)
|
||||
#define COMPILER ("[ICC v." _Py_STRINGIZE(__INTEL_COMPILER) " 64 bit (amd64) with MSC v." _Py_STRINGIZE(_MSC_VER) " CRT]")
|
||||
#else
|
||||
#define COMPILER _Py_PASTE_VERSION("64 bit (AMD64)")
|
||||
#endif /* __INTEL_COMPILER */
|
||||
#define MS_WINX64
|
||||
#define PYD_PLATFORM_TAG "win_amd64"
|
||||
#else
|
||||
|
@ -194,7 +198,11 @@ typedef _W64 int ssize_t;
|
|||
|
||||
#if defined(MS_WIN32) && !defined(MS_WIN64)
|
||||
#if defined(_M_IX86)
|
||||
#if defined(__INTEL_COMPILER)
|
||||
#define COMPILER ("[ICC v." _Py_STRINGIZE(__INTEL_COMPILER) " 32 bit (Intel) with MSC v." _Py_STRINGIZE(_MSC_VER) " CRT]")
|
||||
#else
|
||||
#define COMPILER _Py_PASTE_VERSION("32 bit (Intel)")
|
||||
#endif /* __INTEL_COMPILER */
|
||||
#define PYD_PLATFORM_TAG "win32"
|
||||
#elif defined(_M_ARM)
|
||||
#define COMPILER _Py_PASTE_VERSION("32 bit (ARM)")
|
||||
|
|
|
@@ -1,19 +1,46 @@
@echo off
rem A batch program to build or rebuild a particular configuration,
rem just for convenience.

rem Arguments:
rem  -c  Set the configuration (default: Release)
rem  -p  Set the platform (x64 or Win32, default: Win32)
rem  -r  Target Rebuild instead of Build
rem  -t  Set the target manually (Build, Rebuild, Clean, or CleanAll)
rem  -d  Set the configuration to Debug
rem  -e  Pull in external libraries using get_externals.bat
rem  -m  Enable parallel build (enabled by default)
rem  -M  Disable parallel build
rem  -v  Increased output messages
rem  -k  Attempt to kill any running Pythons before building (usually unnecessary)
goto Run
:Usage
echo.%~nx0 [flags and arguments] [quoted MSBuild options]
echo.
echo.Build CPython from the command line. Requires the appropriate
echo.version(s) of Microsoft Visual Studio to be installed (see readme.txt).
echo.Also requires Subversion (svn.exe) to be on PATH if the '-e' flag is
echo.given.
echo.
echo.After the flags recognized by this script, up to 9 arguments to be passed
echo.directly to MSBuild may be passed. If the argument contains an '=', the
echo.entire argument must be quoted (e.g. `%~nx0 "/p:PlatformToolset=v100"`)
echo.
echo.Available flags:
echo.  -h  Display this help message
echo.  -V  Display version information for the current build
echo.  -r  Target Rebuild instead of Build
echo.  -d  Set the configuration to Debug
echo.  -e  Build external libraries fetched by get_externals.bat
echo.      Extension modules that depend on external libraries will not attempt
echo.      to build if this flag is not present
echo.  -m  Enable parallel build (enabled by default)
echo.  -M  Disable parallel build
echo.  -v  Increased output messages
echo.  -k  Attempt to kill any running Pythons before building (usually done
echo.      automatically by the pythoncore project)
echo.
echo.Available flags to avoid building certain modules.
echo.These flags have no effect if '-e' is not given:
echo.  --no-ssl      Do not attempt to build _ssl
echo.  --no-tkinter  Do not attempt to build Tkinter
echo.
echo.Available arguments:
echo.  -c Release ^| Debug ^| PGInstrument ^| PGUpdate
echo.     Set the configuration (default: Release)
echo.  -p x64 ^| Win32
echo.     Set the platform (default: Win32)
echo.  -t Build ^| Rebuild ^| Clean ^| CleanAll
echo.     Set the target manually
exit /b 127

:Run
setlocal
set platf=Win32
set vs_platf=x86
@@ -25,17 +52,29 @@ set verbose=/nologo /v:m
set kill=

:CheckOpts
if "%~1"=="-h" goto Usage
if "%~1"=="-c" (set conf=%2) & shift & shift & goto CheckOpts
if "%~1"=="-p" (set platf=%2) & shift & shift & goto CheckOpts
if "%~1"=="-r" (set target=Rebuild) & shift & goto CheckOpts
if "%~1"=="-t" (set target=%2) & shift & shift & goto CheckOpts
if "%~1"=="-d" (set conf=Debug) & shift & goto CheckOpts
if "%~1"=="-e" call "%dir%get_externals.bat" & shift & goto CheckOpts
if "%~1"=="-m" (set parallel=/m) & shift & goto CheckOpts
if "%~1"=="-M" (set parallel=) & shift & goto CheckOpts
if "%~1"=="-v" (set verbose=/v:n) & shift & goto CheckOpts
if "%~1"=="-k" (set kill=true) & shift & goto CheckOpts
if "%~1"=="-V" shift & goto Version
rem These use the actual property names used by MSBuild. We could just let
rem them in through the environment, but we specify them on the command line
rem anyway for visibility so set defaults after this
if "%~1"=="-e" (set IncludeExternals=true) & shift & goto CheckOpts
if "%~1"=="--no-ssl" (set IncludeSSL=false) & shift & goto CheckOpts
if "%~1"=="--no-tkinter" (set IncludeTkinter=false) & shift & goto CheckOpts

if "%IncludeExternals%"=="" set IncludeExternals=false
if "%IncludeSSL%"=="" set IncludeSSL=true
if "%IncludeTkinter%"=="" set IncludeTkinter=true

if "%IncludeExternals%"=="true" call "%dir%get_externals.bat"

if "%platf%"=="x64" (set vs_platf=x86_amd64)
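Illustrative note (not part of the change): the new option handling amounts to a small flag-to-property mapping. "-e" opts in to external libraries, the "--no-*" flags opt out of individual modules, and anything left unset falls back to a default before being passed to MSBuild. A rough Python sketch of that logic, using only the flag and property names visible in the batch code above:

def msbuild_properties(args):
    """Mirror build.bat's new flag handling: map flags to MSBuild /p: properties."""
    props = {"IncludeExternals": None, "IncludeSSL": None, "IncludeTkinter": None}
    for arg in args:
        if arg == "-e":
            props["IncludeExternals"] = "true"
        elif arg == "--no-ssl":
            props["IncludeSSL"] = "false"
        elif arg == "--no-tkinter":
            props["IncludeTkinter"] = "false"
    # Defaults are filled in after option parsing, as the batch file does.
    defaults = {"IncludeExternals": "false", "IncludeSSL": "true", "IncludeTkinter": "true"}
    for name, default in defaults.items():
        if props[name] is None:
            props[name] = default
    return ["/p:{}={}".format(name, value) for name, value in sorted(props.items())]

print(msbuild_properties(["-e", "--no-tkinter"]))
# ['/p:IncludeExternals=true', '/p:IncludeSSL=true', '/p:IncludeTkinter=false']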
@@ -43,14 +82,18 @@ rem Setup the environment
call "%dir%env.bat" %vs_platf% >nul

if "%kill%"=="true" (
    msbuild /v:m /nologo /target:KillPython "%pcbuild%\pythoncore.vcxproj" /p:Configuration=%conf% /p:Platform=%platf% /p:KillPython=true
    msbuild /v:m /nologo /target:KillPython "%dir%\pythoncore.vcxproj" /p:Configuration=%conf% /p:Platform=%platf% /p:KillPython=true
)

rem Call on MSBuild to do the work, echo the command.
rem Passing %1-9 is not the preferred option, but argument parsing in
rem batch is, shall we say, "lackluster"
echo on
msbuild "%dir%pcbuild.proj" /t:%target% %parallel% %verbose% /p:Configuration=%conf% /p:Platform=%platf% %1 %2 %3 %4 %5 %6 %7 %8 %9
msbuild "%dir%pcbuild.proj" /t:%target% %parallel% %verbose%^
    /p:Configuration=%conf% /p:Platform=%platf%^
    /p:IncludeExternals=%IncludeExternals%^
    /p:IncludeSSL=%IncludeSSL% /p:IncludeTkinter=%IncludeTkinter%^
    %1 %2 %3 %4 %5 %6 %7 %8 %9

@goto :eof

@@ -51,16 +51,17 @@ if ERRORLEVEL 9009 (

echo.Fetching external libraries...

for %%e in (
    bzip2-1.0.6
    nasm-2.11.06
    openssl-1.0.2d
    tcl-core-8.6.4.2
    tk-8.6.4.2
    tix-8.4.3.6
    sqlite-3.8.11.0
    xz-5.0.5
) do (
set libraries=
set libraries=%libraries% bzip2-1.0.6
if NOT "%IncludeSSL%"=="false" set libraries=%libraries% nasm-2.11.06
if NOT "%IncludeSSL%"=="false" set libraries=%libraries% openssl-1.0.2d
set libraries=%libraries% sqlite-3.8.11.0
if NOT "%IncludeTkinter%"=="false" set libraries=%libraries% tcl-core-8.6.4.2
if NOT "%IncludeTkinter%"=="false" set libraries=%libraries% tk-8.6.4.2
if NOT "%IncludeTkinter%"=="false" set libraries=%libraries% tix-8.4.3.6
set libraries=%libraries% xz-5.0.5

for %%e in (%libraries%) do (
    if exist %%e (
        echo.%%e already exists, skipping.
    ) else (
@@ -5,8 +5,10 @@
    <Platform Condition="'$(Platform)' == ''">Win32</Platform>
    <Configuration Condition="'$(Configuration)' == ''">Release</Configuration>
    <IncludeExtensions Condition="'$(IncludeExtensions)' == ''">true</IncludeExtensions>
    <IncludeExternals Condition="'$(IncludeExternals)' == ''">true</IncludeExternals>
    <IncludeTests Condition="'$(IncludeTest)' == ''">true</IncludeTests>
    <IncludeSSL Condition="'$(IncludeSSL)' == ''">true</IncludeSSL>
    <IncludeTkinter Condition="'$(IncludeTkinter)' == ''">true</IncludeTkinter>
  </PropertyGroup>

  <ItemDefinitionGroup>

@@ -25,7 +27,7 @@
    <!--
    Parallel build is explicitly disabled for this project because it
    causes many conflicts between pythoncore and projects that depend
    in pythoncore. Once the core DLL has been built, subsequent
    on pythoncore. Once the core DLL has been built, subsequent
    projects will be built in parallel.
    -->
    <Projects Include="pythoncore.vcxproj">

@@ -40,10 +42,14 @@
    <!-- _freeze_importlib -->
    <Projects Include="_freeze_importlib.vcxproj" />
    <!-- Extension modules -->
    <ExtensionModules Include="_bz2;_ctypes;_decimal;_elementtree;_lzma;_msi;_multiprocessing;_overlapped;_sqlite3;_tkinter;tix;pyexpat;select;unicodedata;winsound" />
    <ExtensionModules Include="_ctypes;_decimal;_elementtree;_msi;_multiprocessing;_overlapped;pyexpat;select;unicodedata;winsound" />
    <!-- Extension modules that require external sources -->
    <ExternalModules Include="_bz2;_lzma;_sqlite3" />
    <!-- _ssl will build _socket as well, which may cause conflicts in parallel builds -->
    <ExtensionModules Include="_socket" Condition="!$(IncludeSSL)" />
    <ExtensionModules Include="_ssl;_hashlib" Condition="$(IncludeSSL)" />
    <ExtensionModules Include="_socket" Condition="!$(IncludeSSL) or !$(IncludeExternals)" />
    <ExternalModules Include="_ssl;_hashlib" Condition="$(IncludeSSL)" />
    <ExternalModules Include="_tkinter;tix" Condition="$(IncludeTkinter)" />
    <ExtensionModules Include="@(ExternalModules->'%(Identity)')" Condition="$(IncludeExternals)" />
    <Projects Include="@(ExtensionModules->'%(Identity).vcxproj')" Condition="$(IncludeExtensions)" />
    <!-- Test modules -->
    <TestModules Include="_ctypes_test;_testbuffer;_testcapi;_testembed;_testimportmultiple;_testmultiphase" />
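Illustrative note (not part of the change): the reshuffled item groups read as ordinary set arithmetic. Modules that need no external sources are always candidates, modules that need fetched sources join only when IncludeExternals is true, and IncludeSSL decides between building _ssl/_hashlib or building _socket directly. A rough Python sketch of the new selection, using the module and property names from the project file above:

def selected_projects(include_externals=True, include_ssl=True,
                      include_tkinter=True, include_extensions=True):
    # Extension modules that build from the CPython tree alone.
    extension = {"_ctypes", "_decimal", "_elementtree", "_msi", "_multiprocessing",
                 "_overlapped", "pyexpat", "select", "unicodedata", "winsound"}
    # Modules that require sources fetched by get_externals.bat.
    external = {"_bz2", "_lzma", "_sqlite3"}
    if include_ssl:
        external |= {"_ssl", "_hashlib"}
    if include_tkinter:
        external |= {"_tkinter", "tix"}
    # _ssl builds _socket as a side effect; build _socket directly otherwise.
    if not include_ssl or not include_externals:
        extension.add("_socket")
    if include_externals:
        extension |= external
    return {name + ".vcxproj" for name in extension} if include_extensions else set()

print(sorted(selected_projects(include_externals=False)))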
@@ -32,15 +32,17 @@ set prefix=%pcbuild%win32\
set suffix=
set qmode=
set dashO=
set regrtestargs=

:CheckOpts
if "%1"=="-O" (set dashO=-O) & shift & goto CheckOpts
if "%1"=="-q" (set qmode=yes) & shift & goto CheckOpts
if "%1"=="-d" (set suffix=_d) & shift & goto CheckOpts
if "%1"=="-x64" (set prefix=%pcbuild%amd64\) & shift & goto CheckOpts
if NOT "%1"=="" (set regrtestargs=%regrtestargs% %1) & shift & goto CheckOpts

set exe=%prefix%python%suffix%.exe
set cmd="%exe%" %dashO% -Wd -E -bb "%pcbuild%..\lib\test\regrtest.py" %1 %2 %3 %4 %5 %6 %7 %8 %9
set cmd="%exe%" %dashO% -Wd -E -bb "%pcbuild%..\lib\test\regrtest.py" %regrtestargs%
if defined qmode goto Qmode

echo Deleting .pyc/.pyo files ...
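Illustrative note (not part of the change): batch files can only name the first nine positional parameters (%1 through %9), so the rewritten loop folds every unrecognized argument into %regrtestargs% instead of forwarding %1..%9 to regrtest. Roughly, in Python terms, the loop does the following (a sketch only):

def split_rt_args(argv):
    """Peel off the flags rt.bat understands; collect everything else for regrtest."""
    opts = {"dashO": "", "qmode": False, "suffix": "", "prefix": "win32\\"}
    regrtest_args = []
    for arg in argv:
        if arg == "-O":
            opts["dashO"] = "-O"
        elif arg == "-q":
            opts["qmode"] = True
        elif arg == "-d":
            opts["suffix"] = "_d"
        elif arg == "-x64":
            opts["prefix"] = "amd64\\"
        else:
            regrtest_args.append(arg)   # no nine-argument limit here
    return opts, regrtest_args

print(split_rt_args(["-d", "-x64", "-v", "test_os", "test_sys"]))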
@@ -966,7 +966,6 @@ PyObject *PyCodec_BackslashReplaceErrors(PyObject *exc)
}

static _PyUnicode_Name_CAPI *ucnhash_CAPI = NULL;
static int ucnhash_initialized = 0;

PyObject *PyCodec_NameReplaceErrors(PyObject *exc)
{

@@ -988,17 +987,17 @@ PyObject *PyCodec_NameReplaceErrors(PyObject *exc)
        return NULL;
    if (!(object = PyUnicodeEncodeError_GetObject(exc)))
        return NULL;
    if (!ucnhash_initialized) {
    if (!ucnhash_CAPI) {
        /* load the unicode data module */
        ucnhash_CAPI = (_PyUnicode_Name_CAPI *)PyCapsule_Import(
                                        PyUnicodeData_CAPSULE_NAME, 1);
        ucnhash_initialized = 1;
        if (!ucnhash_CAPI)
            return NULL;
    }
    for (i = start, ressize = 0; i < end; ++i) {
        /* object is guaranteed to be "ready" */
        c = PyUnicode_READ_CHAR(object, i);
        if (ucnhash_CAPI &&
            ucnhash_CAPI->getname(NULL, c, buffer, sizeof(buffer), 1)) {
        if (ucnhash_CAPI->getname(NULL, c, buffer, sizeof(buffer), 1)) {
            replsize = 1+1+1+(int)strlen(buffer)+1;
        }
        else if (c >= 0x10000) {

@@ -1021,8 +1020,7 @@ PyObject *PyCodec_NameReplaceErrors(PyObject *exc)
         i < end; ++i) {
        c = PyUnicode_READ_CHAR(object, i);
        *outp++ = '\\';
        if (ucnhash_CAPI &&
            ucnhash_CAPI->getname(NULL, c, buffer, sizeof(buffer), 1)) {
        if (ucnhash_CAPI->getname(NULL, c, buffer, sizeof(buffer), 1)) {
            *outp++ = 'N';
            *outp++ = '{';
            strcpy((char *)outp, buffer);
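Illustrative note (not part of the change): PyCodec_NameReplaceErrors is the C implementation behind the "namereplace" encode error handler, which substitutes \N{...} escapes for characters the target codec cannot encode and falls back to \x/\u/\U escapes for unnamed code points, as the branches above show. A quick check from Python (assumes a build that ships this handler, i.e. Python 3.5 or later):

# The handler replaces an unencodable character with its Unicode name escape.
print("\u2603".encode("ascii", "namereplace"))   # b'\\N{SNOWMAN}'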
@@ -1,4 +1,3 @@

/* Python interpreter top-level routines, including init/exit */

#include "Python.h"

@@ -963,6 +962,23 @@ initsite(void)
    }
}

/* Check if a file descriptor is valid or not.
   Return 0 if the file descriptor is invalid, return non-zero otherwise. */
static int
is_valid_fd(int fd)
{
    int fd2;
    if (fd < 0 || !_PyVerify_fd(fd))
        return 0;
    _Py_BEGIN_SUPPRESS_IPH
    fd2 = dup(fd);
    if (fd2 >= 0)
        close(fd2);
    _Py_END_SUPPRESS_IPH
    return fd2 >= 0;
}

/* returns Py_None if the fd is not valid */
static PyObject*
create_stdio(PyObject* io,
    int fd, int write_mode, char* name,

@@ -978,6 +994,9 @@ create_stdio(PyObject* io,
    _Py_IDENTIFIER(TextIOWrapper);
    _Py_IDENTIFIER(mode);

    if (!is_valid_fd(fd))
        Py_RETURN_NONE;

    /* stdin is always opened in buffered mode, first because it shouldn't
       make a difference in common use cases, second because TextIOWrapper
       depends on the presence of a read1() method which only exists on

@@ -1059,21 +1078,15 @@ error:
    Py_XDECREF(stream);
    Py_XDECREF(text);
    Py_XDECREF(raw);
    return NULL;
}

static int
is_valid_fd(int fd)
{
    int dummy_fd;
    if (fd < 0 || !_PyVerify_fd(fd))
        return 0;
    _Py_BEGIN_SUPPRESS_IPH
    dummy_fd = dup(fd);
    if (dummy_fd >= 0)
        close(dummy_fd);
    _Py_END_SUPPRESS_IPH
    return dummy_fd >= 0;
    if (PyErr_ExceptionMatches(PyExc_OSError) && !is_valid_fd(fd)) {
        /* Issue #24891: the file descriptor was closed after the first
           is_valid_fd() check was called. Ignore the OSError and set the
           stream to None. */
        PyErr_Clear();
        Py_RETURN_NONE;
    }
    return NULL;
}

/* Initialize sys.stdin, stdout, stderr and builtins.open */
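Illustrative note (not part of the change): the is_valid_fd() helper probes a descriptor by dup()ing it and closing the duplicate, and the new Issue #24891 branch additionally tolerates the descriptor being closed between that probe and the actual open. A rough Python analogue of the validity probe, for readers who want to see the idea outside the C code:

import os

def is_valid_fd(fd):
    """Rough analogue of the C helper: os.dup() succeeds only for open descriptors."""
    if fd < 0:
        return False
    try:
        fd2 = os.dup(fd)
    except OSError:
        return False
    os.close(fd2)
    return True

print(is_valid_fd(0), is_valid_fd(9999))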
@@ -1158,30 +1171,18 @@ initstdio(void)
     * and fileno() may point to an invalid file descriptor. For example
     * GUI apps don't have valid standard streams by default.
     */
    if (!is_valid_fd(fd)) {
        std = Py_None;
        Py_INCREF(std);
    }
    else {
        std = create_stdio(iomod, fd, 0, "<stdin>", encoding, errors);
        if (std == NULL)
            goto error;
    } /* if (fd < 0) */
    std = create_stdio(iomod, fd, 0, "<stdin>", encoding, errors);
    if (std == NULL)
        goto error;
    PySys_SetObject("__stdin__", std);
    _PySys_SetObjectId(&PyId_stdin, std);
    Py_DECREF(std);

    /* Set sys.stdout */
    fd = fileno(stdout);
    if (!is_valid_fd(fd)) {
        std = Py_None;
        Py_INCREF(std);
    }
    else {
        std = create_stdio(iomod, fd, 1, "<stdout>", encoding, errors);
        if (std == NULL)
            goto error;
    } /* if (fd < 0) */
    std = create_stdio(iomod, fd, 1, "<stdout>", encoding, errors);
    if (std == NULL)
        goto error;
    PySys_SetObject("__stdout__", std);
    _PySys_SetObjectId(&PyId_stdout, std);
    Py_DECREF(std);

@@ -1189,15 +1190,9 @@ initstdio(void)
#if 1 /* Disable this if you have trouble debugging bootstrap stuff */
    /* Set sys.stderr, replaces the preliminary stderr */
    fd = fileno(stderr);
    if (!is_valid_fd(fd)) {
        std = Py_None;
        Py_INCREF(std);
    }
    else {
        std = create_stdio(iomod, fd, 1, "<stderr>", encoding, "backslashreplace");
        if (std == NULL)
            goto error;
    } /* if (fd < 0) */
    std = create_stdio(iomod, fd, 1, "<stderr>", encoding, "backslashreplace");
    if (std == NULL)
        goto error;

    /* Same as hack above, pre-import stderr's codec to avoid recursion
       when import.c tries to write to stderr in verbose mode. */
@@ -531,12 +531,8 @@ _PyTime_GetSystemClockWithInfo(_PyTime_t *t, _Py_clock_info_t *info)


static int
pymonotonic_new(_PyTime_t *tp, _Py_clock_info_t *info, int raise)
pymonotonic(_PyTime_t *tp, _Py_clock_info_t *info, int raise)
{
#ifdef Py_DEBUG
    static int last_set = 0;
    static _PyTime_t last = 0;
#endif
#if defined(MS_WINDOWS)
    ULONGLONG result;

@@ -627,12 +623,6 @@ pymonotonic_new(_PyTime_t *tp, _Py_clock_info_t *info, int raise)
    }
    if (_PyTime_FromTimespec(tp, &ts, raise) < 0)
        return -1;
#endif
#ifdef Py_DEBUG
    /* monotonic clock cannot go backward */
    assert(!last_set || last <= *tp);
    last = *tp;
    last_set = 1;
#endif
    return 0;
}
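Illustrative note (not part of the change): the debug-only bookkeeping removed in this hunk asserted that the monotonic clock never runs backward. The guarantee itself still holds and can be observed from Python with time.monotonic(); a small self-contained check:

import time

# Successive readings of the monotonic clock never decrease.
last = time.monotonic()
for _ in range(1000):
    now = time.monotonic()
    assert now >= last, "monotonic clock went backwards"
    last = now
print("monotonic clock is non-decreasing over 1000 samples")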
@@ -641,7 +631,7 @@ _PyTime_t
_PyTime_GetMonotonicClock(void)
{
    _PyTime_t t;
    if (pymonotonic_new(&t, NULL, 0) < 0) {
    if (pymonotonic(&t, NULL, 0) < 0) {
        /* should not happen, _PyTime_Init() checked that monotonic clock at
           startup */
        assert(0);

@@ -655,7 +645,7 @@ _PyTime_GetMonotonicClock(void)
int
_PyTime_GetMonotonicClockWithInfo(_PyTime_t *tp, _Py_clock_info_t *info)
{
    return pymonotonic_new(tp, info, 1);
    return pymonotonic(tp, info, 1);
}

int
@@ -1,15 +1,19 @@
@rem Used by the buildbot "test" step.
@setlocal
@echo off
rem Used by the buildbot "test" step.
setlocal

@set here=%~dp0
@set rt_opts=-q -d
set here=%~dp0
set rt_opts=-q -d
set regrtest_args=

:CheckOpts
@if '%1'=='-x64' (set rt_opts=%rt_opts% %1) & shift & goto CheckOpts
@if '%1'=='-d' (set rt_opts=%rt_opts% %1) & shift & goto CheckOpts
@if '%1'=='-O' (set rt_opts=%rt_opts% %1) & shift & goto CheckOpts
@if '%1'=='-q' (set rt_opts=%rt_opts% %1) & shift & goto CheckOpts
@if '%1'=='+d' (set rt_opts=%rt_opts:-d=%) & shift & goto CheckOpts
@if '%1'=='+q' (set rt_opts=%rt_opts:-q=%) & shift & goto CheckOpts
if "%1"=="-x64" (set rt_opts=%rt_opts% %1) & shift & goto CheckOpts
if "%1"=="-d" (set rt_opts=%rt_opts% %1) & shift & goto CheckOpts
if "%1"=="-O" (set rt_opts=%rt_opts% %1) & shift & goto CheckOpts
if "%1"=="-q" (set rt_opts=%rt_opts% %1) & shift & goto CheckOpts
if "%1"=="+d" (set rt_opts=%rt_opts:-d=%) & shift & goto CheckOpts
if "%1"=="+q" (set rt_opts=%rt_opts:-q=%) & shift & goto CheckOpts
if NOT "%1"=="" (set regrtest_args=%regrtest_args% %1) & shift & goto CheckOpts

call "%here%..\..\PCbuild\rt.bat" %rt_opts% -uall -rwW -n --timeout=3600 %1 %2 %3 %4 %5 %6 %7 %8 %9
echo on
call "%here%..\..\PCbuild\rt.bat" %rt_opts% -uall -rwW -n --timeout=3600 %regrtest_args%
@@ -7,6 +7,7 @@ set BUILDX86=
set BUILDX64=
set BUILDDOC=
set BUILDPX=
set BUILDPACK=

:CheckOpts
if "%~1" EQU "-h" goto Help

@@ -14,6 +15,7 @@ if "%~1" EQU "-x86" (set BUILDX86=1) && shift && goto CheckOpts
if "%~1" EQU "-x64" (set BUILDX64=1) && shift && goto CheckOpts
if "%~1" EQU "--doc" (set BUILDDOC=1) && shift && goto CheckOpts
if "%~1" EQU "--test-marker" (set BUILDPX=1) && shift && goto CheckOpts
if "%~1" EQU "--pack" (set BUILDPACK=1) && shift && goto CheckOpts

if not defined BUILDX86 if not defined BUILDX64 (set BUILDX86=1) && (set BUILDX64=1)

@@ -41,6 +43,9 @@ set BUILD_CMD="%D%bundle\snapshot.wixproj"
if defined BUILDPX (
    set BUILD_CMD=%BUILD_CMD% /p:UseTestMarker=true
)
if defined BUILDPACK (
    set BUILD_CMD=%BUILD_CMD% /p:Pack=true
)

if defined BUILDX86 (
    "%PCBUILD%win32\python.exe" "%D%get_wix.py"

@@ -56,9 +61,10 @@ if defined BUILDX64 (
exit /B 0

:Help
echo build.bat [-x86] [-x64] [--doc] [-h] [--test-marker]
echo build.bat [-x86] [-x64] [--doc] [-h] [--test-marker] [--pack]
echo.
echo    -x86            Build x86 installers
echo    -x64            Build x64 installers
echo    --doc           Build CHM documentation
echo    --test-marker   Build installers with 'x' markers
echo    --pack          Embed core MSIs into installer
@@ -9,9 +9,14 @@
  <Import Project="..\msi.props" />

  <PropertyGroup>
    <DefineConstants Condition="'$(Pack)' != 'true'">
      $(DefineConstants);CompressMSI=no;
    </DefineConstants>
    <DefineConstants Condition="'$(Pack)' == 'true'">
      $(DefineConstants);CompressMSI=yes;
    </DefineConstants>
    <DefineConstants>
      $(DefineConstants);
      CompressMSI=no;
      CompressPDB=no;
      CompressMSI_D=no;
    </DefineConstants>
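Illustrative note (not part of the change): as read from the conditions above, the CompressMSI define now follows the Pack property (yes when the core MSIs are packed into the bundle, no otherwise), while the PDB and debug-MSI compression constants stay off. A rough Python sketch of how the constants string is assembled under that reading:

def define_constants(pack=False):
    """Mirror the wixproj conditions: CompressMSI follows the Pack property."""
    constants = {
        "CompressMSI": "yes" if pack else "no",
        "CompressPDB": "no",
        "CompressMSI_D": "no",
    }
    return ";".join("%s=%s" % kv for kv in constants.items())

print(define_constants(pack=True))   # CompressMSI=yes;CompressPDB=no;CompressMSI_D=no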
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title_d)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.TitlePdb)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title_d)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title_d)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.TitlePdb)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title_d)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.TitlePdb)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title_d)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.TitlePdb)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title_d)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.TitlePdb)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />