merge heads
This commit is contained in:
commit
47177861dd
|
@ -583,7 +583,7 @@ And here's an example of changing the counter:
|
|||
|
||||
Because ``yield`` will often be returning ``None``, you should always check for
|
||||
this case. Don't just use its value in expressions unless you're sure that the
|
||||
:meth:`~generator.send` method will be the only method used resume your
|
||||
:meth:`~generator.send` method will be the only method used to resume your
|
||||
generator function.
|
||||
|
||||
In addition to :meth:`~generator.send`, there are two other methods on
|
||||
|
|
|
@ -311,11 +311,10 @@ Creating listening connections
|
|||
|
||||
.. method:: BaseEventLoop.create_server(protocol_factory, host=None, port=None, \*, family=socket.AF_UNSPEC, flags=socket.AI_PASSIVE, sock=None, backlog=100, ssl=None, reuse_address=None)
|
||||
|
||||
A :ref:`coroutine <coroutine>` method which creates a TCP server bound to
|
||||
host and port.
|
||||
Create a TCP server bound to host and port. Return an
|
||||
:class:`AbstractServer` object which can be used to stop the service.
|
||||
|
||||
The return value is a :class:`AbstractServer` object which can be used to stop
|
||||
the service.
|
||||
This method is a :ref:`coroutine <coroutine>`.
|
||||
|
||||
If *host* is an empty string or None all interfaces are assumed
|
||||
and a list of multiple sockets will be returned (most likely
|
||||
|
@ -588,10 +587,14 @@ Debug mode
|
|||
|
||||
Get the debug mode (:class:`bool`) of the event loop, ``False`` by default.
|
||||
|
||||
.. versionadded:: 3.4.2
|
||||
|
||||
.. method:: BaseEventLoop.set_debug(enabled: bool)
|
||||
|
||||
Set the debug mode of the event loop.
|
||||
|
||||
.. versionadded:: 3.4.2
|
||||
|
||||
.. seealso::
|
||||
|
||||
The :ref:`Develop with asyncio <asyncio-dev>` section.
|
||||
|
|
|
@ -22,8 +22,8 @@ Create a subprocess: high-level API using Process
|
|||
|
||||
.. function:: create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds)
|
||||
|
||||
Run the shell command *cmd* given as a string. Return a :class:`~asyncio.subprocess.Process`
|
||||
instance.
|
||||
Run the shell command *cmd*. See :meth:`BaseEventLoop.subprocess_shell` for
|
||||
parameters. Return a :class:`~asyncio.subprocess.Process` instance.
|
||||
|
||||
The optional *limit* parameter sets the buffer limit passed to the
|
||||
:class:`StreamReader`.
|
||||
|
@ -32,7 +32,8 @@ Create a subprocess: high-level API using Process
|
|||
|
||||
.. function:: create_subprocess_exec(\*args, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds)
|
||||
|
||||
Create a subprocess. Return a :class:`~asyncio.subprocess.Process` instance.
|
||||
Create a subprocess. See :meth:`BaseEventLoop.subprocess_exec` for
|
||||
parameters. Return a :class:`~asyncio.subprocess.Process` instance.
|
||||
|
||||
The optional *limit* parameter sets the buffer limit passed to the
|
||||
:class:`StreamReader`.
|
||||
|
@ -50,7 +51,9 @@ Run subprocesses asynchronously using the :mod:`subprocess` module.
|
|||
|
||||
.. method:: BaseEventLoop.subprocess_exec(protocol_factory, \*args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs)
|
||||
|
||||
Create a subprocess from one or more string arguments, where the first string
|
||||
Create a subprocess from one or more string arguments (character strings or
|
||||
bytes strings encoded to the :ref:`filesystem encoding
|
||||
<filesystem-encoding>`), where the first string
|
||||
specifies the program to execute, and the remaining strings specify the
|
||||
program's arguments. (Thus, together the string arguments form the
|
||||
``sys.argv`` value of the program, assuming it is a Python script.) This is
|
||||
|
@ -94,8 +97,9 @@ Run subprocesses asynchronously using the :mod:`subprocess` module.
|
|||
|
||||
.. method:: BaseEventLoop.subprocess_shell(protocol_factory, cmd, \*, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs)
|
||||
|
||||
Create a subprocess from *cmd*, which is a string using the platform's
|
||||
"shell" syntax. This is similar to the standard library
|
||||
Create a subprocess from *cmd*, which is a character string or a bytes
|
||||
string encoded to the :ref:`filesystem encoding <filesystem-encoding>`,
|
||||
using the platform's "shell" syntax. This is similar to the standard library
|
||||
:class:`subprocess.Popen` class called with ``shell=True``.
|
||||
|
||||
See :meth:`~BaseEventLoop.subprocess_exec` for more details about
|
||||
|
|
|
@ -274,9 +274,10 @@ The following exceptions are the exceptions that are usually raised.
|
|||
|
||||
Raised when the result of an arithmetic operation is too large to be
|
||||
represented. This cannot occur for integers (which would rather raise
|
||||
:exc:`MemoryError` than give up). Because of the lack of standardization of
|
||||
floating point exception handling in C, most floating point operations also
|
||||
aren't checked.
|
||||
:exc:`MemoryError` than give up). However, for historical reasons,
|
||||
OverflowError is sometimes raised for integers that are outside a required
|
||||
range. Because of the lack of standardization of floating point exception
|
||||
handling in C, most floating point operations are not checked.
|
||||
|
||||
|
||||
.. exception:: ReferenceError
|
||||
|
|
|
@ -742,7 +742,8 @@ are always available. They are listed here in alphabetical order.
|
|||
.. function:: len(s)
|
||||
|
||||
Return the length (the number of items) of an object. The argument may be a
|
||||
sequence (string, tuple or list) or a mapping (dictionary).
|
||||
sequence (such as a string, bytes, tuple, list, or range) or a collection
|
||||
(such as a dictionary, set, or frozen set).
|
||||
|
||||
|
||||
.. _func-list:
|
||||
|
|
|
@ -159,6 +159,16 @@ attributes:
|
|||
| | | arguments and local |
|
||||
| | | variables |
|
||||
+-----------+-----------------+---------------------------+
|
||||
| generator | __name__ | name |
|
||||
+-----------+-----------------+---------------------------+
|
||||
| | __qualname__ | qualified name |
|
||||
+-----------+-----------------+---------------------------+
|
||||
| | gi_frame | frame |
|
||||
+-----------+-----------------+---------------------------+
|
||||
| | gi_running | is the generator running? |
|
||||
+-----------+-----------------+---------------------------+
|
||||
| | gi_code | code |
|
||||
+-----------+-----------------+---------------------------+
|
||||
| builtin | __doc__ | documentation string |
|
||||
+-----------+-----------------+---------------------------+
|
||||
| | __name__ | original name of this |
|
||||
|
@ -169,6 +179,12 @@ attributes:
|
|||
| | | ``None`` |
|
||||
+-----------+-----------------+---------------------------+
|
||||
|
||||
.. versionchanged:: 3.5
|
||||
|
||||
Add ``__qualname__`` attribute to generators. The ``__name__`` attribute of
|
||||
generators is now set from the function name, instead of the code name, and
|
||||
it can now be modified.
|
||||
|
||||
|
||||
.. function:: getmembers(object[, predicate])
|
||||
|
||||
|
|
|
@ -353,6 +353,12 @@ I/O Base Classes
|
|||
is usual for each of the lines provided to have a line separator at the
|
||||
end.
|
||||
|
||||
.. method:: __del__()
|
||||
|
||||
Prepare for object destruction. :class:`IOBase` provides a default
|
||||
implementation of this method that calls the instance's
|
||||
:meth:`~IOBase.close` method.
|
||||
|
||||
|
||||
.. class:: RawIOBase
|
||||
|
||||
|
|
|
@ -65,6 +65,7 @@ Notes on the availability of these functions:
|
|||
|
||||
|
||||
.. _os-filenames:
|
||||
.. _filesystem-encoding:
|
||||
|
||||
File Names, Command Line Arguments, and Environment Variables
|
||||
-------------------------------------------------------------
|
||||
|
@ -1094,7 +1095,8 @@ or `the MSDN <http://msdn.microsoft.com/en-us/library/z0kc8e3z.aspx>`_ on Window
|
|||
|
||||
.. note::
|
||||
|
||||
For a higher-level version of this see :mod:`socket.socket.sendfile`.
|
||||
For a higher-level wrapper of :func:`sendfile`, see
|
||||
:mod:`socket.socket.sendfile`.
|
||||
|
||||
.. versionadded:: 3.3
|
||||
|
||||
|
@ -1903,6 +1905,11 @@ features:
|
|||
* :attr:`st_creator`
|
||||
* :attr:`st_type`
|
||||
|
||||
On Windows systems, the following attribute is also available:
|
||||
|
||||
* :attr:`st_file_attributes` - Windows file attribute bits (see the
|
||||
``FILE_ATTRIBUTE_*`` constants in the :mod:`stat` module)
|
||||
|
||||
.. note::
|
||||
|
||||
The exact meaning and resolution of the :attr:`st_atime`,
|
||||
|
@ -1956,6 +1963,9 @@ features:
|
|||
and the :attr:`st_atime_ns`, :attr:`st_mtime_ns`,
|
||||
and :attr:`st_ctime_ns` members.
|
||||
|
||||
.. versionadded:: 3.5
|
||||
Added the :attr:`st_file_attributes` member on Windows.
|
||||
|
||||
|
||||
.. function:: stat_float_times([newvalue])
|
||||
|
||||
|
@ -2235,9 +2245,11 @@ features:
|
|||
|
||||
If optional argument *topdown* is ``True`` or not specified, the triple for a
|
||||
directory is generated before the triples for any of its subdirectories
|
||||
(directories are generated top-down). If *topdown* is ``False``, the triple for a
|
||||
directory is generated after the triples for all of its subdirectories
|
||||
(directories are generated bottom-up).
|
||||
(directories are generated top-down). If *topdown* is ``False``, the triple
|
||||
for a directory is generated after the triples for all of its subdirectories
|
||||
(directories are generated bottom-up). No matter the value of *topdown*, the
|
||||
list of subdirectories is retrieved before the tuples for the directory and
|
||||
its subdirectories are generated.
|
||||
|
||||
When *topdown* is ``True``, the caller can modify the *dirnames* list in-place
|
||||
(perhaps using :keyword:`del` or slice assignment), and :func:`walk` will only
|
||||
|
|
|
@ -407,7 +407,7 @@ The remaining methods are specific to audio mixing:
|
|||
(silent) to 100 (full volume). If the control is monophonic, a 2-tuple is still
|
||||
returned, but both volumes are the same.
|
||||
|
||||
Raises :exc:`OSSAudioError` if an invalid control was is specified, or
|
||||
Raises :exc:`OSSAudioError` if an invalid control is specified, or
|
||||
:exc:`OSError` if an unsupported control is specified.
|
||||
|
||||
|
||||
|
|
|
@ -458,8 +458,8 @@ form.
|
|||
.. function:: compile(pattern, flags=0)
|
||||
|
||||
Compile a regular expression pattern into a regular expression object, which
|
||||
can be used for matching using its :func:`match` and :func:`search` methods,
|
||||
described below.
|
||||
can be used for matching using its :func:`~regex.match` and
|
||||
:func:`~regex.search` methods, described below.
|
||||
|
||||
The expression's behaviour can be modified by specifying a *flags* value.
|
||||
Values can be any of the following variables, combined using bitwise OR (the
|
||||
|
|
|
@ -28,6 +28,9 @@ The :mod:`runpy` module provides two functions:
|
|||
|
||||
.. function:: run_module(mod_name, init_globals=None, run_name=None, alter_sys=False)
|
||||
|
||||
.. index::
|
||||
module: __main__
|
||||
|
||||
Execute the code of the specified module and return the resulting module
|
||||
globals dictionary. The module's code is first located using the standard
|
||||
import mechanism (refer to :pep:`302` for details) and then executed in a
|
||||
|
@ -87,6 +90,9 @@ The :mod:`runpy` module provides two functions:
|
|||
|
||||
.. function:: run_path(file_path, init_globals=None, run_name=None)
|
||||
|
||||
.. index::
|
||||
module: __main__
|
||||
|
||||
Execute the code at the named filesystem location and return the resulting
|
||||
module globals dictionary. As with a script name supplied to the CPython
|
||||
command line, the supplied path may refer to a Python source file, a
|
||||
|
|
|
@ -126,7 +126,7 @@ Example::
|
|||
if __name__ == '__main__':
|
||||
walktree(sys.argv[1], visitfile)
|
||||
|
||||
An additional utility function is provided to covert a file's mode in a human
|
||||
An additional utility function is provided to convert a file's mode in a human
|
||||
readable string:
|
||||
|
||||
.. function:: filemode(mode)
|
||||
|
@ -399,3 +399,29 @@ The following flags can be used in the *flags* argument of :func:`os.chflags`:
|
|||
The file is a snapshot file.
|
||||
|
||||
See the \*BSD or Mac OS systems man page :manpage:`chflags(2)` for more information.
|
||||
|
||||
On Windows, the following file attribute constants are available for use when
|
||||
testing bits in the ``st_file_attributes`` member returned by :func:`os.stat`.
|
||||
See the `Windows API documentation
|
||||
<http://msdn.microsoft.com/en-us/library/windows/desktop/gg258117.aspx>`_
|
||||
for more detail on the meaning of these constants.
|
||||
|
||||
.. data:: FILE_ATTRIBUTE_ARCHIVE
|
||||
FILE_ATTRIBUTE_COMPRESSED
|
||||
FILE_ATTRIBUTE_DEVICE
|
||||
FILE_ATTRIBUTE_DIRECTORY
|
||||
FILE_ATTRIBUTE_ENCRYPTED
|
||||
FILE_ATTRIBUTE_HIDDEN
|
||||
FILE_ATTRIBUTE_INTEGRITY_STREAM
|
||||
FILE_ATTRIBUTE_NORMAL
|
||||
FILE_ATTRIBUTE_NOT_CONTENT_INDEXED
|
||||
FILE_ATTRIBUTE_NO_SCRUB_DATA
|
||||
FILE_ATTRIBUTE_OFFLINE
|
||||
FILE_ATTRIBUTE_READONLY
|
||||
FILE_ATTRIBUTE_REPARSE_POINT
|
||||
FILE_ATTRIBUTE_SPARSE_FILE
|
||||
FILE_ATTRIBUTE_SYSTEM
|
||||
FILE_ATTRIBUTE_TEMPORARY
|
||||
FILE_ATTRIBUTE_VIRTUAL
|
||||
|
||||
.. versionadded:: 3.5
|
||||
|
|
|
@ -176,6 +176,15 @@ ipaddress
|
|||
network objects from existing addresses (contributed by Peter Moody
|
||||
and Antoine Pitrou in :issue:`16531`).
|
||||
|
||||
os
|
||||
--
|
||||
|
||||
* :class:`os.stat_result` now has a ``st_file_attributes`` field on Windows,
|
||||
containing the ``dwFileAttributes`` member of the
|
||||
``BY_HANDLE_FILE_INFORMATION`` structure returned by
|
||||
``GetFileInformationByHandle()`` (contributed by Ben Hoyt in
|
||||
:issue:`21719`).
|
||||
|
||||
shutil
|
||||
------
|
||||
|
||||
|
@ -304,6 +313,12 @@ Changes in the Python API
|
|||
or :exc:`ssl.SSLWantWriteError` on a non-blocking socket if the operation
|
||||
would block. Previously, it would return 0. See :issue:`20951`.
|
||||
|
||||
* The ``__name__`` attribute of generator is now set from the function name,
|
||||
instead of being set from the code name. Use ``gen.gi_code.co_name`` to
|
||||
retrieve the code name. Generators also have a new ``__qualname__``
|
||||
attribute, the qualified name, which is now used for the representation
|
||||
of a generator (``repr(gen)``). See :issue:`21205`.
|
||||
|
||||
Changes in the C API
|
||||
--------------------
|
||||
|
||||
|
|
|
@ -25,6 +25,12 @@ typedef struct {
|
|||
|
||||
/* List of weak reference. */
|
||||
PyObject *gi_weakreflist;
|
||||
|
||||
/* Name of the generator. */
|
||||
PyObject *gi_name;
|
||||
|
||||
/* Qualified name of the generator. */
|
||||
PyObject *gi_qualname;
|
||||
} PyGenObject;
|
||||
|
||||
PyAPI_DATA(PyTypeObject) PyGen_Type;
|
||||
|
@ -33,6 +39,8 @@ PyAPI_DATA(PyTypeObject) PyGen_Type;
|
|||
#define PyGen_CheckExact(op) (Py_TYPE(op) == &PyGen_Type)
|
||||
|
||||
PyAPI_FUNC(PyObject *) PyGen_New(struct _frame *);
|
||||
PyAPI_FUNC(PyObject *) PyGen_NewWithQualName(struct _frame *,
|
||||
PyObject *name, PyObject *qualname);
|
||||
PyAPI_FUNC(int) PyGen_NeedsFinalizing(PyGenObject *);
|
||||
PyAPI_FUNC(int) _PyGen_FetchStopIterationValue(PyObject **);
|
||||
PyObject *_PyGen_Send(PyGenObject *, PyObject *);
|
||||
|
|
|
@ -17,6 +17,7 @@ to modify the meaning of the API call itself.
|
|||
import collections
|
||||
import concurrent.futures
|
||||
import heapq
|
||||
import inspect
|
||||
import logging
|
||||
import socket
|
||||
import subprocess
|
||||
|
@ -37,6 +38,15 @@ __all__ = ['BaseEventLoop', 'Server']
|
|||
_MAX_WORKERS = 5
|
||||
|
||||
|
||||
def _format_handle(handle):
|
||||
cb = handle._callback
|
||||
if inspect.ismethod(cb) and isinstance(cb.__self__, tasks.Task):
|
||||
# format the task
|
||||
return repr(cb.__self__)
|
||||
else:
|
||||
return str(handle)
|
||||
|
||||
|
||||
class _StopError(BaseException):
|
||||
"""Raised to stop the event loop."""
|
||||
|
||||
|
@ -128,6 +138,9 @@ class BaseEventLoop(events.AbstractEventLoop):
|
|||
self._clock_resolution = time.get_clock_info('monotonic').resolution
|
||||
self._exception_handler = None
|
||||
self._debug = False
|
||||
# In debug mode, if the execution of a callback or a step of a task
|
||||
# exceed this duration in seconds, the slow callback/task is logged.
|
||||
self.slow_callback_duration = 0.1
|
||||
|
||||
def __repr__(self):
|
||||
return ('<%s running=%s closed=%s debug=%s>'
|
||||
|
@ -320,7 +333,7 @@ class BaseEventLoop(events.AbstractEventLoop):
|
|||
"than the current one")
|
||||
|
||||
def call_soon_threadsafe(self, callback, *args):
|
||||
"""XXX"""
|
||||
"""Like call_soon(), but thread safe."""
|
||||
handle = self._call_soon(callback, args, check_loop=False)
|
||||
self._write_to_self()
|
||||
return handle
|
||||
|
@ -358,7 +371,17 @@ class BaseEventLoop(events.AbstractEventLoop):
|
|||
def create_connection(self, protocol_factory, host=None, port=None, *,
|
||||
ssl=None, family=0, proto=0, flags=0, sock=None,
|
||||
local_addr=None, server_hostname=None):
|
||||
"""XXX"""
|
||||
"""Connect to a TCP server.
|
||||
|
||||
Create a streaming transport connection to a given Internet host and
|
||||
port: socket family AF_INET or socket.AF_INET6 depending on host (or
|
||||
family if specified), socket type SOCK_STREAM. protocol_factory must be
|
||||
a callable returning a protocol instance.
|
||||
|
||||
This method is a coroutine which will try to establish the connection
|
||||
in the background. When successful, the coroutine returns a
|
||||
(transport, protocol) pair.
|
||||
"""
|
||||
if server_hostname is not None and not ssl:
|
||||
raise ValueError('server_hostname is only meaningful with ssl')
|
||||
|
||||
|
@ -557,7 +580,12 @@ class BaseEventLoop(events.AbstractEventLoop):
|
|||
backlog=100,
|
||||
ssl=None,
|
||||
reuse_address=None):
|
||||
"""XXX"""
|
||||
"""Create a TCP server bound to host and port.
|
||||
|
||||
Return an AbstractServer object which can be used to stop the service.
|
||||
|
||||
This method is a coroutine.
|
||||
"""
|
||||
if isinstance(ssl, bool):
|
||||
raise TypeError('ssl argument must be an SSLContext or None')
|
||||
if host is not None or port is not None:
|
||||
|
@ -808,16 +836,16 @@ class BaseEventLoop(events.AbstractEventLoop):
|
|||
if logger.isEnabledFor(logging.INFO):
|
||||
t0 = self.time()
|
||||
event_list = self._selector.select(timeout)
|
||||
t1 = self.time()
|
||||
if t1-t0 >= 1:
|
||||
dt = self.time() - t0
|
||||
if dt >= 1:
|
||||
level = logging.INFO
|
||||
else:
|
||||
level = logging.DEBUG
|
||||
if timeout is not None:
|
||||
logger.log(level, 'poll %.3f took %.3f seconds',
|
||||
timeout, t1-t0)
|
||||
timeout, dt)
|
||||
else:
|
||||
logger.log(level, 'poll took %.3f seconds', t1-t0)
|
||||
logger.log(level, 'poll took %.3f seconds', dt)
|
||||
else:
|
||||
event_list = self._selector.select(timeout)
|
||||
self._process_events(event_list)
|
||||
|
@ -840,7 +868,16 @@ class BaseEventLoop(events.AbstractEventLoop):
|
|||
ntodo = len(self._ready)
|
||||
for i in range(ntodo):
|
||||
handle = self._ready.popleft()
|
||||
if not handle._cancelled:
|
||||
if handle._cancelled:
|
||||
continue
|
||||
if self._debug:
|
||||
t0 = self.time()
|
||||
handle._run()
|
||||
dt = self.time() - t0
|
||||
if dt >= self.slow_callback_duration:
|
||||
logger.warning('Executing %s took %.3f seconds',
|
||||
_format_handle(handle), dt)
|
||||
else:
|
||||
handle._run()
|
||||
handle = None # Needed to break cycles when an exception occurs.
|
||||
|
||||
|
|
|
@ -105,7 +105,7 @@ class Queue:
|
|||
if self._maxsize <= 0:
|
||||
return False
|
||||
else:
|
||||
return self.qsize() == self._maxsize
|
||||
return self.qsize() >= self._maxsize
|
||||
|
||||
@coroutine
|
||||
def put(self, item):
|
||||
|
@ -126,7 +126,7 @@ class Queue:
|
|||
self._put(item)
|
||||
getter.set_result(self._get())
|
||||
|
||||
elif self._maxsize > 0 and self._maxsize == self.qsize():
|
||||
elif self._maxsize > 0 and self._maxsize <= self.qsize():
|
||||
waiter = futures.Future(loop=self._loop)
|
||||
|
||||
self._putters.append((item, waiter))
|
||||
|
@ -152,7 +152,7 @@ class Queue:
|
|||
self._put(item)
|
||||
getter.set_result(self._get())
|
||||
|
||||
elif self._maxsize > 0 and self._maxsize == self.qsize():
|
||||
elif self._maxsize > 0 and self._maxsize <= self.qsize():
|
||||
raise QueueFull
|
||||
else:
|
||||
self._put(item)
|
||||
|
|
|
@ -83,10 +83,15 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
|
|||
self.add_reader(self._ssock.fileno(), self._read_from_self)
|
||||
|
||||
def _read_from_self(self):
|
||||
while True:
|
||||
try:
|
||||
self._ssock.recv(1)
|
||||
except (BlockingIOError, InterruptedError):
|
||||
pass
|
||||
data = self._ssock.recv(4096)
|
||||
if not data:
|
||||
break
|
||||
except InterruptedError:
|
||||
continue
|
||||
except BlockingIOError:
|
||||
break
|
||||
|
||||
def _write_to_self(self):
|
||||
# This may be called from a different thread, possibly after
|
||||
|
@ -221,7 +226,14 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
|
|||
return False
|
||||
|
||||
def sock_recv(self, sock, n):
|
||||
"""XXX"""
|
||||
"""Receive data from the socket.
|
||||
|
||||
The return value is a bytes object representing the data received.
|
||||
The maximum amount of data to be received at once is specified by
|
||||
nbytes.
|
||||
|
||||
This method is a coroutine.
|
||||
"""
|
||||
fut = futures.Future(loop=self)
|
||||
self._sock_recv(fut, False, sock, n)
|
||||
return fut
|
||||
|
@ -248,7 +260,16 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
|
|||
fut.set_result(data)
|
||||
|
||||
def sock_sendall(self, sock, data):
|
||||
"""XXX"""
|
||||
"""Send data to the socket.
|
||||
|
||||
The socket must be connected to a remote socket. This method continues
|
||||
to send data from data until either all data has been sent or an
|
||||
error occurs. None is returned on success. On error, an exception is
|
||||
raised, and there is no way to determine how much data, if any, was
|
||||
successfully processed by the receiving end of the connection.
|
||||
|
||||
This method is a coroutine.
|
||||
"""
|
||||
fut = futures.Future(loop=self)
|
||||
if data:
|
||||
self._sock_sendall(fut, False, sock, data)
|
||||
|
@ -280,7 +301,16 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
|
|||
self.add_writer(fd, self._sock_sendall, fut, True, sock, data)
|
||||
|
||||
def sock_connect(self, sock, address):
|
||||
"""XXX"""
|
||||
"""Connect to a remote socket at address.
|
||||
|
||||
The address must be already resolved to avoid the trap of hanging the
|
||||
entire event loop when the address requires doing a DNS lookup. For
|
||||
example, it must be an IP address, not an hostname, for AF_INET and
|
||||
AF_INET6 address families. Use getaddrinfo() to resolve the hostname
|
||||
asynchronously.
|
||||
|
||||
This method is a coroutine.
|
||||
"""
|
||||
fut = futures.Future(loop=self)
|
||||
try:
|
||||
base_events._check_resolved_address(sock, address)
|
||||
|
@ -313,7 +343,15 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
|
|||
fut.set_result(None)
|
||||
|
||||
def sock_accept(self, sock):
|
||||
"""XXX"""
|
||||
"""Accept a connection.
|
||||
|
||||
The socket must be bound to an address and listening for connections.
|
||||
The return value is a pair (conn, address) where conn is a new socket
|
||||
object usable to send and receive data on the connection, and address
|
||||
is the address bound to the socket on the other end of the connection.
|
||||
|
||||
This method is a coroutine.
|
||||
"""
|
||||
fut = futures.Future(loop=self)
|
||||
self._sock_accept(fut, False, sock)
|
||||
return fut
|
||||
|
|
|
@ -32,12 +32,12 @@ from .log import logger
|
|||
_DEBUG = (not sys.flags.ignore_environment
|
||||
and bool(os.environ.get('PYTHONASYNCIODEBUG')))
|
||||
|
||||
_PY35 = (sys.version_info >= (3, 5))
|
||||
|
||||
|
||||
class CoroWrapper:
|
||||
# Wrapper for coroutine in _DEBUG mode.
|
||||
|
||||
__slots__ = ['gen', 'func', '__name__', '__doc__', '__weakref__']
|
||||
|
||||
def __init__(self, gen, func):
|
||||
assert inspect.isgenerator(gen), gen
|
||||
self.gen = gen
|
||||
|
@ -111,8 +111,10 @@ def coroutine(func):
|
|||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwds):
|
||||
w = CoroWrapper(coro(*args, **kwds), func)
|
||||
w.__name__ = coro.__name__
|
||||
w.__doc__ = coro.__doc__
|
||||
w.__name__ = func.__name__
|
||||
if _PY35:
|
||||
w.__qualname__ = func.__qualname__
|
||||
w.__doc__ = func.__doc__
|
||||
return w
|
||||
|
||||
wrapper._is_coroutine = True # For iscoroutinefunction().
|
||||
|
@ -190,7 +192,7 @@ class Task(futures.Future):
|
|||
i = len(res)
|
||||
text = self._coro.__name__
|
||||
coro = self._coro
|
||||
if inspect.isgenerator(coro):
|
||||
if iscoroutine(coro):
|
||||
filename = coro.gi_code.co_filename
|
||||
if coro.gi_frame is not None:
|
||||
text += ' at %s:%s' % (filename, coro.gi_frame.f_lineno)
|
||||
|
|
|
@ -11,6 +11,7 @@ import sys
|
|||
import tempfile
|
||||
import threading
|
||||
import time
|
||||
import unittest
|
||||
from unittest import mock
|
||||
|
||||
from http.server import HTTPServer
|
||||
|
@ -379,3 +380,20 @@ def get_function_source(func):
|
|||
if source is None:
|
||||
raise ValueError("unable to get the source of %r" % (func,))
|
||||
return source
|
||||
|
||||
|
||||
class TestCase(unittest.TestCase):
|
||||
def set_event_loop(self, loop, *, cleanup=True):
|
||||
assert loop is not None
|
||||
# ensure that the event loop is passed explicitly in asyncio
|
||||
events.set_event_loop(None)
|
||||
if cleanup:
|
||||
self.addCleanup(loop.close)
|
||||
|
||||
def new_test_loop(self, gen=None):
|
||||
loop = TestLoop(gen)
|
||||
self.set_event_loop(loop)
|
||||
return loop
|
||||
|
||||
def tearDown(self):
|
||||
events.set_event_loop(None)
|
||||
|
|
|
@ -12,7 +12,7 @@ import hashlib
|
|||
from base64 import standard_b64encode
|
||||
from urllib.request import urlopen, Request, HTTPError
|
||||
from urllib.parse import urlparse
|
||||
from distutils.errors import DistutilsOptionError
|
||||
from distutils.errors import DistutilsError, DistutilsOptionError
|
||||
from distutils.core import PyPIRCCommand
|
||||
from distutils.spawn import spawn
|
||||
from distutils import log
|
||||
|
@ -184,7 +184,7 @@ class upload(PyPIRCCommand):
|
|||
reason = result.msg
|
||||
except OSError as e:
|
||||
self.announce(str(e), log.ERROR)
|
||||
return
|
||||
raise
|
||||
except HTTPError as e:
|
||||
status = e.code
|
||||
reason = e.msg
|
||||
|
@ -193,8 +193,9 @@ class upload(PyPIRCCommand):
|
|||
self.announce('Server response (%s): %s' % (status, reason),
|
||||
log.INFO)
|
||||
else:
|
||||
self.announce('Upload failed (%s): %s' % (status, reason),
|
||||
log.ERROR)
|
||||
msg = 'Upload failed (%s): %s' % (status, reason)
|
||||
self.announce(msg, log.ERROR)
|
||||
raise DistutilsError(msg)
|
||||
if self.show_response:
|
||||
text = self._read_pypi_response(result)
|
||||
msg = '\n'.join(('-' * 75, text, '-' * 75))
|
||||
|
|
|
@ -6,6 +6,7 @@ from test.support import run_unittest
|
|||
from distutils.command import upload as upload_mod
|
||||
from distutils.command.upload import upload
|
||||
from distutils.core import Distribution
|
||||
from distutils.errors import DistutilsError
|
||||
from distutils.log import INFO
|
||||
|
||||
from distutils.tests.test_config import PYPIRC, PyPIRCCommandTestCase
|
||||
|
@ -41,13 +42,14 @@ username:me
|
|||
|
||||
class FakeOpen(object):
|
||||
|
||||
def __init__(self, url):
|
||||
def __init__(self, url, msg=None, code=None):
|
||||
self.url = url
|
||||
if not isinstance(url, str):
|
||||
self.req = url
|
||||
else:
|
||||
self.req = None
|
||||
self.msg = 'OK'
|
||||
self.msg = msg or 'OK'
|
||||
self.code = code or 200
|
||||
|
||||
def getheader(self, name, default=None):
|
||||
return {
|
||||
|
@ -58,7 +60,7 @@ class FakeOpen(object):
|
|||
return b'xyzzy'
|
||||
|
||||
def getcode(self):
|
||||
return 200
|
||||
return self.code
|
||||
|
||||
|
||||
class uploadTestCase(PyPIRCCommandTestCase):
|
||||
|
@ -68,13 +70,15 @@ class uploadTestCase(PyPIRCCommandTestCase):
|
|||
self.old_open = upload_mod.urlopen
|
||||
upload_mod.urlopen = self._urlopen
|
||||
self.last_open = None
|
||||
self.next_msg = None
|
||||
self.next_code = None
|
||||
|
||||
def tearDown(self):
|
||||
upload_mod.urlopen = self.old_open
|
||||
super(uploadTestCase, self).tearDown()
|
||||
|
||||
def _urlopen(self, url):
|
||||
self.last_open = FakeOpen(url)
|
||||
self.last_open = FakeOpen(url, msg=self.next_msg, code=self.next_code)
|
||||
return self.last_open
|
||||
|
||||
def test_finalize_options(self):
|
||||
|
@ -135,6 +139,10 @@ class uploadTestCase(PyPIRCCommandTestCase):
|
|||
results = self.get_logs(INFO)
|
||||
self.assertIn('xyzzy\n', results[-1])
|
||||
|
||||
def test_upload_fails(self):
|
||||
self.next_msg = "Not Found"
|
||||
self.next_code = 404
|
||||
self.assertRaises(DistutilsError, self.test_upload)
|
||||
|
||||
def test_suite():
|
||||
return unittest.makeSuite(uploadTestCase)
|
||||
|
|
32
Lib/heapq.py
32
Lib/heapq.py
|
@ -311,16 +311,6 @@ def _siftup_max(heap, pos):
|
|||
heap[pos] = newitem
|
||||
_siftdown_max(heap, startpos, pos)
|
||||
|
||||
# If available, use C implementation
|
||||
try:
|
||||
from _heapq import *
|
||||
except ImportError:
|
||||
pass
|
||||
try:
|
||||
from _heapq import _heapreplace_max
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
def merge(*iterables, key=None, reverse=False):
|
||||
'''Merge multiple sorted inputs into a single sorted output.
|
||||
|
||||
|
@ -474,7 +464,7 @@ def nsmallest(n, iterable, key=None):
|
|||
Equivalent to: sorted(iterable, key=key)[:n]
|
||||
"""
|
||||
|
||||
# Short-cut for n==1 is to use min() when len(iterable)>0
|
||||
# Short-cut for n==1 is to use min()
|
||||
if n == 1:
|
||||
it = iter(iterable)
|
||||
sentinel = object()
|
||||
|
@ -537,7 +527,7 @@ def nlargest(n, iterable, key=None):
|
|||
Equivalent to: sorted(iterable, key=key, reverse=True)[:n]
|
||||
"""
|
||||
|
||||
# Short-cut for n==1 is to use max() when len(iterable)>0
|
||||
# Short-cut for n==1 is to use max()
|
||||
if n == 1:
|
||||
it = iter(iterable)
|
||||
sentinel = object()
|
||||
|
@ -592,6 +582,24 @@ def nlargest(n, iterable, key=None):
|
|||
result.sort(reverse=True)
|
||||
return [r[2] for r in result]
|
||||
|
||||
# If available, use C implementation
|
||||
try:
|
||||
from _heapq import *
|
||||
except ImportError:
|
||||
pass
|
||||
try:
|
||||
from _heapq import _heapreplace_max
|
||||
except ImportError:
|
||||
pass
|
||||
try:
|
||||
from _heapq import _heapify_max
|
||||
except ImportError:
|
||||
pass
|
||||
try:
|
||||
from _heapq import _heappop_max
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
|
|
|
@ -977,7 +977,7 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
|
|||
(and the next character is a '/' or the end of the string).
|
||||
|
||||
"""
|
||||
collapsed_path = _url_collapse_path(self.path)
|
||||
collapsed_path = _url_collapse_path(urllib.parse.unquote(self.path))
|
||||
dir_sep = collapsed_path.find('/', 1)
|
||||
head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:]
|
||||
if head in self.cgi_directories:
|
||||
|
|
|
@ -1,11 +1,8 @@
|
|||
"""
|
||||
HyperParser
|
||||
===========
|
||||
This module defines the HyperParser class, which provides advanced parsing
|
||||
abilities for the ParenMatch and other extensions.
|
||||
The HyperParser uses PyParser. PyParser is intended mostly to give information
|
||||
on the proper indentation of code. HyperParser gives some information on the
|
||||
structure of code, used by extensions to help the user.
|
||||
"""Provide advanced parsing abilities for ParenMatch and other extensions.
|
||||
|
||||
HyperParser uses PyParser. PyParser mostly gives information on the
|
||||
proper indentation of code. HyperParser gives additional information on
|
||||
the structure of code.
|
||||
"""
|
||||
|
||||
import string
|
||||
|
@ -15,9 +12,7 @@ from idlelib import PyParse
|
|||
class HyperParser:
|
||||
|
||||
def __init__(self, editwin, index):
|
||||
"""Initialize the HyperParser to analyze the surroundings of the given
|
||||
index.
|
||||
"""
|
||||
"To initialize, analyze the surroundings of the given index."
|
||||
|
||||
self.editwin = editwin
|
||||
self.text = text = editwin.text
|
||||
|
@ -33,9 +28,10 @@ class HyperParser:
|
|||
startat = max(lno - context, 1)
|
||||
startatindex = repr(startat) + ".0"
|
||||
stopatindex = "%d.end" % lno
|
||||
# We add the newline because PyParse requires a newline at end.
|
||||
# We add a space so that index won't be at end of line, so that
|
||||
# its status will be the same as the char before it, if should.
|
||||
# We add the newline because PyParse requires a newline
|
||||
# at end. We add a space so that index won't be at end
|
||||
# of line, so that its status will be the same as the
|
||||
# char before it, if should.
|
||||
parser.set_str(text.get(startatindex, stopatindex)+' \n')
|
||||
bod = parser.find_good_parse_start(
|
||||
editwin._build_char_in_string_func(startatindex))
|
||||
|
@ -49,122 +45,131 @@ class HyperParser:
|
|||
else:
|
||||
startatindex = "1.0"
|
||||
stopatindex = "%d.end" % lno
|
||||
# We add the newline because PyParse requires a newline at end.
|
||||
# We add a space so that index won't be at end of line, so that
|
||||
# its status will be the same as the char before it, if should.
|
||||
# We add the newline because PyParse requires it. We add a
|
||||
# space so that index won't be at end of line, so that its
|
||||
# status will be the same as the char before it, if should.
|
||||
parser.set_str(text.get(startatindex, stopatindex)+' \n')
|
||||
parser.set_lo(0)
|
||||
|
||||
# We want what the parser has, except for the last newline and space.
|
||||
# We want what the parser has, minus the last newline and space.
|
||||
self.rawtext = parser.str[:-2]
|
||||
# As far as I can see, parser.str preserves the statement we are in,
|
||||
# so that stopatindex can be used to synchronize the string with the
|
||||
# text box indices.
|
||||
# Parser.str apparently preserves the statement we are in, so
|
||||
# that stopatindex can be used to synchronize the string with
|
||||
# the text box indices.
|
||||
self.stopatindex = stopatindex
|
||||
self.bracketing = parser.get_last_stmt_bracketing()
|
||||
# find which pairs of bracketing are openers. These always correspond
|
||||
# to a character of rawtext.
|
||||
self.isopener = [i>0 and self.bracketing[i][1] > self.bracketing[i-1][1]
|
||||
# find which pairs of bracketing are openers. These always
|
||||
# correspond to a character of rawtext.
|
||||
self.isopener = [i>0 and self.bracketing[i][1] >
|
||||
self.bracketing[i-1][1]
|
||||
for i in range(len(self.bracketing))]
|
||||
|
||||
self.set_index(index)
|
||||
|
||||
def set_index(self, index):
|
||||
"""Set the index to which the functions relate. Note that it must be
|
||||
in the same statement.
|
||||
"""Set the index to which the functions relate.
|
||||
|
||||
The index must be in the same statement.
|
||||
"""
|
||||
indexinrawtext = \
|
||||
len(self.rawtext) - len(self.text.get(index, self.stopatindex))
|
||||
indexinrawtext = (len(self.rawtext) -
|
||||
len(self.text.get(index, self.stopatindex)))
|
||||
if indexinrawtext < 0:
|
||||
raise ValueError("The index given is before the analyzed statement")
|
||||
raise ValueError("Index %s precedes the analyzed statement"
|
||||
% index)
|
||||
self.indexinrawtext = indexinrawtext
|
||||
# find the rightmost bracket to which index belongs
|
||||
self.indexbracket = 0
|
||||
while self.indexbracket < len(self.bracketing)-1 and \
|
||||
self.bracketing[self.indexbracket+1][0] < self.indexinrawtext:
|
||||
while (self.indexbracket < len(self.bracketing)-1 and
|
||||
self.bracketing[self.indexbracket+1][0] < self.indexinrawtext):
|
||||
self.indexbracket += 1
|
||||
if self.indexbracket < len(self.bracketing)-1 and \
|
||||
self.bracketing[self.indexbracket+1][0] == self.indexinrawtext and \
|
||||
not self.isopener[self.indexbracket+1]:
|
||||
if (self.indexbracket < len(self.bracketing)-1 and
|
||||
self.bracketing[self.indexbracket+1][0] == self.indexinrawtext and
|
||||
not self.isopener[self.indexbracket+1]):
|
||||
self.indexbracket += 1
|
||||
|
||||
def is_in_string(self):
|
||||
"""Is the index given to the HyperParser is in a string?"""
|
||||
"""Is the index given to the HyperParser in a string?"""
|
||||
# The bracket to which we belong should be an opener.
|
||||
# If it's an opener, it has to have a character.
|
||||
return self.isopener[self.indexbracket] and \
|
||||
self.rawtext[self.bracketing[self.indexbracket][0]] in ('"', "'")
|
||||
return (self.isopener[self.indexbracket] and
|
||||
self.rawtext[self.bracketing[self.indexbracket][0]]
|
||||
in ('"', "'"))
|
||||
|
||||
def is_in_code(self):
|
||||
"""Is the index given to the HyperParser is in a normal code?"""
|
||||
return not self.isopener[self.indexbracket] or \
|
||||
self.rawtext[self.bracketing[self.indexbracket][0]] not in \
|
||||
('#', '"', "'")
|
||||
"""Is the index given to the HyperParser in normal code?"""
|
||||
return (not self.isopener[self.indexbracket] or
|
||||
self.rawtext[self.bracketing[self.indexbracket][0]]
|
||||
not in ('#', '"', "'"))
|
||||
|
||||
def get_surrounding_brackets(self, openers='([{', mustclose=False):
|
||||
"""If the index given to the HyperParser is surrounded by a bracket
|
||||
defined in openers (or at least has one before it), return the
|
||||
indices of the opening bracket and the closing bracket (or the
|
||||
end of line, whichever comes first).
|
||||
If it is not surrounded by brackets, or the end of line comes before
|
||||
the closing bracket and mustclose is True, returns None.
|
||||
"""Return bracket indexes or None.
|
||||
|
||||
If the index given to the HyperParser is surrounded by a
|
||||
bracket defined in openers (or at least has one before it),
|
||||
return the indices of the opening bracket and the closing
|
||||
bracket (or the end of line, whichever comes first).
|
||||
|
||||
If it is not surrounded by brackets, or the end of line comes
|
||||
before the closing bracket and mustclose is True, returns None.
|
||||
"""
|
||||
|
||||
bracketinglevel = self.bracketing[self.indexbracket][1]
|
||||
before = self.indexbracket
|
||||
while not self.isopener[before] or \
|
||||
self.rawtext[self.bracketing[before][0]] not in openers or \
|
||||
self.bracketing[before][1] > bracketinglevel:
|
||||
while (not self.isopener[before] or
|
||||
self.rawtext[self.bracketing[before][0]] not in openers or
|
||||
self.bracketing[before][1] > bracketinglevel):
|
||||
before -= 1
|
||||
if before < 0:
|
||||
return None
|
||||
bracketinglevel = min(bracketinglevel, self.bracketing[before][1])
|
||||
after = self.indexbracket + 1
|
||||
while after < len(self.bracketing) and \
|
||||
self.bracketing[after][1] >= bracketinglevel:
|
||||
while (after < len(self.bracketing) and
|
||||
self.bracketing[after][1] >= bracketinglevel):
|
||||
after += 1
|
||||
|
||||
beforeindex = self.text.index("%s-%dc" %
|
||||
(self.stopatindex, len(self.rawtext)-self.bracketing[before][0]))
|
||||
if after >= len(self.bracketing) or \
|
||||
self.bracketing[after][0] > len(self.rawtext):
|
||||
if (after >= len(self.bracketing) or
|
||||
self.bracketing[after][0] > len(self.rawtext)):
|
||||
if mustclose:
|
||||
return None
|
||||
afterindex = self.stopatindex
|
||||
else:
|
||||
# We are after a real char, so it is a ')' and we give the index
|
||||
# before it.
|
||||
afterindex = self.text.index("%s-%dc" %
|
||||
(self.stopatindex,
|
||||
# We are after a real char, so it is a ')' and we give the
|
||||
# index before it.
|
||||
afterindex = self.text.index(
|
||||
"%s-%dc" % (self.stopatindex,
|
||||
len(self.rawtext)-(self.bracketing[after][0]-1)))
|
||||
|
||||
return beforeindex, afterindex
|
||||
|
||||
# This string includes all chars that may be in a white space
|
||||
# Ascii chars that may be in a white space
|
||||
_whitespace_chars = " \t\n\\"
|
||||
# This string includes all chars that may be in an identifier
|
||||
# Ascii chars that may be in an identifier
|
||||
_id_chars = string.ascii_letters + string.digits + "_"
|
||||
# This string includes all chars that may be the first char of an identifier
|
||||
# Ascii chars that may be the first char of an identifier
|
||||
_id_first_chars = string.ascii_letters + "_"
|
||||
|
||||
# Given a string and pos, return the number of chars in the identifier
|
||||
# which ends at pos, or 0 if there is no such one. Saved words are not
|
||||
# identifiers.
|
||||
# Given a string and pos, return the number of chars in the
|
||||
# identifier which ends at pos, or 0 if there is no such one. Saved
|
||||
# words are not identifiers.
|
||||
def _eat_identifier(self, str, limit, pos):
|
||||
i = pos
|
||||
while i > limit and str[i-1] in self._id_chars:
|
||||
i -= 1
|
||||
if i < pos and (str[i] not in self._id_first_chars or \
|
||||
keyword.iskeyword(str[i:pos])):
|
||||
if (i < pos and (str[i] not in self._id_first_chars or
|
||||
(keyword.iskeyword(str[i:pos]) and
|
||||
str[i:pos] not in {'None', 'False', 'True'}))):
|
||||
i = pos
|
||||
return pos - i
|
||||
|
||||
def get_expression(self):
|
||||
"""Return a string with the Python expression which ends at the given
|
||||
index, which is empty if there is no real one.
|
||||
"""Return a string with the Python expression which ends at the
|
||||
given index, which is empty if there is no real one.
|
||||
"""
|
||||
if not self.is_in_code():
|
||||
raise ValueError("get_expression should only be called if index "\
|
||||
"is inside a code.")
|
||||
raise ValueError("get_expression should only be called"
|
||||
"if index is inside a code.")
|
||||
|
||||
rawtext = self.rawtext
|
||||
bracketing = self.bracketing
|
||||
|
@ -177,20 +182,20 @@ class HyperParser:
|
|||
postdot_phase = True
|
||||
|
||||
while 1:
|
||||
# Eat whitespaces, comments, and if postdot_phase is False - one dot
|
||||
# Eat whitespaces, comments, and if postdot_phase is False - a dot
|
||||
while 1:
|
||||
if pos>brck_limit and rawtext[pos-1] in self._whitespace_chars:
|
||||
# Eat a whitespace
|
||||
pos -= 1
|
||||
elif not postdot_phase and \
|
||||
pos > brck_limit and rawtext[pos-1] == '.':
|
||||
elif (not postdot_phase and
|
||||
pos > brck_limit and rawtext[pos-1] == '.'):
|
||||
# Eat a dot
|
||||
pos -= 1
|
||||
postdot_phase = True
|
||||
# The next line will fail if we are *inside* a comment, but we
|
||||
# shouldn't be.
|
||||
elif pos == brck_limit and brck_index > 0 and \
|
||||
rawtext[bracketing[brck_index-1][0]] == '#':
|
||||
# The next line will fail if we are *inside* a comment,
|
||||
# but we shouldn't be.
|
||||
elif (pos == brck_limit and brck_index > 0 and
|
||||
rawtext[bracketing[brck_index-1][0]] == '#'):
|
||||
# Eat a comment
|
||||
brck_index -= 2
|
||||
brck_limit = bracketing[brck_index][0]
|
||||
|
@ -200,8 +205,8 @@ class HyperParser:
|
|||
break
|
||||
|
||||
if not postdot_phase:
|
||||
# We didn't find a dot, so the expression end at the last
|
||||
# identifier pos.
|
||||
# We didn't find a dot, so the expression end at the
|
||||
# last identifier pos.
|
||||
break
|
||||
|
||||
ret = self._eat_identifier(rawtext, brck_limit, pos)
|
||||
|
@ -209,13 +214,13 @@ class HyperParser:
|
|||
# There is an identifier to eat
|
||||
pos = pos - ret
|
||||
last_identifier_pos = pos
|
||||
# Now, in order to continue the search, we must find a dot.
|
||||
# Now, to continue the search, we must find a dot.
|
||||
postdot_phase = False
|
||||
# (the loop continues now)
|
||||
|
||||
elif pos == brck_limit:
|
||||
# We are at a bracketing limit. If it is a closing bracket,
|
||||
# eat the bracket, otherwise, stop the search.
|
||||
# We are at a bracketing limit. If it is a closing
|
||||
# bracket, eat the bracket, otherwise, stop the search.
|
||||
level = bracketing[brck_index][1]
|
||||
while brck_index > 0 and bracketing[brck_index-1][1] > level:
|
||||
brck_index -= 1
|
||||
|
@ -244,3 +249,8 @@ class HyperParser:
|
|||
break
|
||||
|
||||
return rawtext[last_identifier_pos:self.indexinrawtext]
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import unittest
|
||||
unittest.main('idlelib.idle_test.test_hyperparser', verbosity=2)
|
||||
|
|
|
@ -90,7 +90,8 @@ class ParenMatch:
|
|||
self.set_timeout = self.set_timeout_none
|
||||
|
||||
def flash_paren_event(self, event):
|
||||
indices = HyperParser(self.editwin, "insert").get_surrounding_brackets()
|
||||
indices = (HyperParser(self.editwin, "insert")
|
||||
.get_surrounding_brackets())
|
||||
if indices is None:
|
||||
self.warn_mismatched()
|
||||
return
|
||||
|
@ -167,6 +168,11 @@ class ParenMatch:
|
|||
# associate a counter with an event; only disable the "paren"
|
||||
# tag if the event is for the most recent timer.
|
||||
self.counter += 1
|
||||
self.editwin.text_frame.after(self.FLASH_DELAY,
|
||||
lambda self=self, c=self.counter: \
|
||||
self.handle_restore_timer(c))
|
||||
self.editwin.text_frame.after(
|
||||
self.FLASH_DELAY,
|
||||
lambda self=self, c=self.counter: self.handle_restore_timer(c))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import unittest
|
||||
unittest.main('idlelib.idle_test.test_parenmatch', verbosity=2)
|
||||
|
|
|
@ -0,0 +1,191 @@
|
|||
"""Unittest for idlelib.HyperParser"""
|
||||
import unittest
|
||||
from test.support import requires
|
||||
from tkinter import Tk, Text
|
||||
from idlelib.EditorWindow import EditorWindow
|
||||
from idlelib.HyperParser import HyperParser
|
||||
|
||||
class DummyEditwin:
|
||||
def __init__(self, text):
|
||||
self.text = text
|
||||
self.indentwidth = 8
|
||||
self.tabwidth = 8
|
||||
self.context_use_ps1 = True
|
||||
self.num_context_lines = 50, 500, 1000
|
||||
|
||||
_build_char_in_string_func = EditorWindow._build_char_in_string_func
|
||||
is_char_in_string = EditorWindow.is_char_in_string
|
||||
|
||||
|
||||
class HyperParserTest(unittest.TestCase):
|
||||
code = (
|
||||
'"""This is a module docstring"""\n'
|
||||
'# this line is a comment\n'
|
||||
'x = "this is a string"\n'
|
||||
"y = 'this is also a string'\n"
|
||||
'l = [i for i in range(10)]\n'
|
||||
'm = [py*py for # comment\n'
|
||||
' py in l]\n'
|
||||
'x.__len__\n'
|
||||
"z = ((r'asdf')+('a')))\n"
|
||||
'[x for x in\n'
|
||||
'for = False\n'
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
requires('gui')
|
||||
cls.root = Tk()
|
||||
cls.text = Text(cls.root)
|
||||
cls.editwin = DummyEditwin(cls.text)
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
del cls.text, cls.editwin
|
||||
cls.root.destroy()
|
||||
del cls.root
|
||||
|
||||
def setUp(self):
|
||||
self.text.insert('insert', self.code)
|
||||
|
||||
def tearDown(self):
|
||||
self.text.delete('1.0', 'end')
|
||||
self.editwin.context_use_ps1 = True
|
||||
|
||||
def get_parser(self, index):
|
||||
"""
|
||||
Return a parser object with index at 'index'
|
||||
"""
|
||||
return HyperParser(self.editwin, index)
|
||||
|
||||
def test_init(self):
|
||||
"""
|
||||
test corner cases in the init method
|
||||
"""
|
||||
with self.assertRaises(ValueError) as ve:
|
||||
self.text.tag_add('console', '1.0', '1.end')
|
||||
p = self.get_parser('1.5')
|
||||
self.assertIn('precedes', str(ve.exception))
|
||||
|
||||
# test without ps1
|
||||
self.editwin.context_use_ps1 = False
|
||||
|
||||
# number of lines lesser than 50
|
||||
p = self.get_parser('end')
|
||||
self.assertEqual(p.rawtext, self.text.get('1.0', 'end'))
|
||||
|
||||
# number of lines greater than 50
|
||||
self.text.insert('end', self.text.get('1.0', 'end')*4)
|
||||
p = self.get_parser('54.5')
|
||||
|
||||
def test_is_in_string(self):
|
||||
get = self.get_parser
|
||||
|
||||
p = get('1.0')
|
||||
self.assertFalse(p.is_in_string())
|
||||
p = get('1.4')
|
||||
self.assertTrue(p.is_in_string())
|
||||
p = get('2.3')
|
||||
self.assertFalse(p.is_in_string())
|
||||
p = get('3.3')
|
||||
self.assertFalse(p.is_in_string())
|
||||
p = get('3.7')
|
||||
self.assertTrue(p.is_in_string())
|
||||
p = get('4.6')
|
||||
self.assertTrue(p.is_in_string())
|
||||
|
||||
def test_is_in_code(self):
|
||||
get = self.get_parser
|
||||
|
||||
p = get('1.0')
|
||||
self.assertTrue(p.is_in_code())
|
||||
p = get('1.1')
|
||||
self.assertFalse(p.is_in_code())
|
||||
p = get('2.5')
|
||||
self.assertFalse(p.is_in_code())
|
||||
p = get('3.4')
|
||||
self.assertTrue(p.is_in_code())
|
||||
p = get('3.6')
|
||||
self.assertFalse(p.is_in_code())
|
||||
p = get('4.14')
|
||||
self.assertFalse(p.is_in_code())
|
||||
|
||||
def test_get_surrounding_bracket(self):
|
||||
get = self.get_parser
|
||||
|
||||
def without_mustclose(parser):
|
||||
# a utility function to get surrounding bracket
|
||||
# with mustclose=False
|
||||
return parser.get_surrounding_brackets(mustclose=False)
|
||||
|
||||
def with_mustclose(parser):
|
||||
# a utility function to get surrounding bracket
|
||||
# with mustclose=True
|
||||
return parser.get_surrounding_brackets(mustclose=True)
|
||||
|
||||
p = get('3.2')
|
||||
self.assertIsNone(with_mustclose(p))
|
||||
self.assertIsNone(without_mustclose(p))
|
||||
|
||||
p = get('5.6')
|
||||
self.assertTupleEqual(without_mustclose(p), ('5.4', '5.25'))
|
||||
self.assertTupleEqual(without_mustclose(p), with_mustclose(p))
|
||||
|
||||
p = get('5.23')
|
||||
self.assertTupleEqual(without_mustclose(p), ('5.21', '5.24'))
|
||||
self.assertTupleEqual(without_mustclose(p), with_mustclose(p))
|
||||
|
||||
p = get('6.15')
|
||||
self.assertTupleEqual(without_mustclose(p), ('6.4', '6.end'))
|
||||
self.assertIsNone(with_mustclose(p))
|
||||
|
||||
p = get('9.end')
|
||||
self.assertIsNone(with_mustclose(p))
|
||||
self.assertIsNone(without_mustclose(p))
|
||||
|
||||
def test_get_expression(self):
|
||||
get = self.get_parser
|
||||
|
||||
p = get('4.2')
|
||||
self.assertEqual(p.get_expression(), 'y ')
|
||||
|
||||
p = get('4.7')
|
||||
with self.assertRaises(ValueError) as ve:
|
||||
p.get_expression()
|
||||
self.assertIn('is inside a code', str(ve.exception))
|
||||
|
||||
p = get('5.25')
|
||||
self.assertEqual(p.get_expression(), 'range(10)')
|
||||
|
||||
p = get('6.7')
|
||||
self.assertEqual(p.get_expression(), 'py')
|
||||
|
||||
p = get('6.8')
|
||||
self.assertEqual(p.get_expression(), '')
|
||||
|
||||
p = get('7.9')
|
||||
self.assertEqual(p.get_expression(), 'py')
|
||||
|
||||
p = get('8.end')
|
||||
self.assertEqual(p.get_expression(), 'x.__len__')
|
||||
|
||||
p = get('9.13')
|
||||
self.assertEqual(p.get_expression(), "r'asdf'")
|
||||
|
||||
p = get('9.17')
|
||||
with self.assertRaises(ValueError) as ve:
|
||||
p.get_expression()
|
||||
self.assertIn('is inside a code', str(ve.exception))
|
||||
|
||||
p = get('10.0')
|
||||
self.assertEqual(p.get_expression(), '')
|
||||
|
||||
p = get('11.3')
|
||||
self.assertEqual(p.get_expression(), '')
|
||||
|
||||
p = get('11.11')
|
||||
self.assertEqual(p.get_expression(), 'False')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main(verbosity=2)
|
|
@ -0,0 +1,109 @@
|
|||
"""Test idlelib.ParenMatch."""
|
||||
# This must currently be a gui test because ParenMatch methods use
|
||||
# several text methods not defined on idlelib.idle_test.mock_tk.Text.
|
||||
|
||||
import unittest
|
||||
from unittest.mock import Mock
|
||||
from test.support import requires
|
||||
from tkinter import Tk, Text
|
||||
from idlelib.ParenMatch import ParenMatch
|
||||
|
||||
class DummyEditwin:
|
||||
def __init__(self, text):
|
||||
self.text = text
|
||||
self.indentwidth = 8
|
||||
self.tabwidth = 8
|
||||
self.context_use_ps1 = True
|
||||
|
||||
|
||||
class ParenMatchTest(unittest.TestCase):
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
requires('gui')
|
||||
cls.root = Tk()
|
||||
cls.text = Text(cls.root)
|
||||
cls.editwin = DummyEditwin(cls.text)
|
||||
cls.editwin.text_frame = Mock()
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
del cls.text, cls.editwin
|
||||
cls.root.destroy()
|
||||
del cls.root
|
||||
|
||||
def tearDown(self):
|
||||
self.text.delete('1.0', 'end')
|
||||
|
||||
def test_paren_expression(self):
|
||||
"""
|
||||
Test ParenMatch with 'expression' style.
|
||||
"""
|
||||
text = self.text
|
||||
pm = ParenMatch(self.editwin)
|
||||
pm.set_style('expression')
|
||||
|
||||
text.insert('insert', 'def foobar(a, b')
|
||||
pm.flash_paren_event('event')
|
||||
self.assertIn('<<parenmatch-check-restore>>', text.event_info())
|
||||
self.assertTupleEqual(text.tag_prevrange('paren', 'end'),
|
||||
('1.10', '1.15'))
|
||||
text.insert('insert', ')')
|
||||
pm.restore_event()
|
||||
self.assertNotIn('<<parenmatch-check-restore>>', text.event_info())
|
||||
self.assertEqual(text.tag_prevrange('paren', 'end'), ())
|
||||
|
||||
# paren_closed_event can only be tested as below
|
||||
pm.paren_closed_event('event')
|
||||
self.assertTupleEqual(text.tag_prevrange('paren', 'end'),
|
||||
('1.10', '1.16'))
|
||||
|
||||
def test_paren_default(self):
|
||||
"""
|
||||
Test ParenMatch with 'default' style.
|
||||
"""
|
||||
text = self.text
|
||||
pm = ParenMatch(self.editwin)
|
||||
pm.set_style('default')
|
||||
|
||||
text.insert('insert', 'def foobar(a, b')
|
||||
pm.flash_paren_event('event')
|
||||
self.assertIn('<<parenmatch-check-restore>>', text.event_info())
|
||||
self.assertTupleEqual(text.tag_prevrange('paren', 'end'),
|
||||
('1.10', '1.11'))
|
||||
text.insert('insert', ')')
|
||||
pm.restore_event()
|
||||
self.assertNotIn('<<parenmatch-check-restore>>', text.event_info())
|
||||
self.assertEqual(text.tag_prevrange('paren', 'end'), ())
|
||||
|
||||
def test_paren_corner(self):
|
||||
"""
|
||||
Test corner cases in flash_paren_event and paren_closed_event.
|
||||
|
||||
These cases force conditional expression and alternate paths.
|
||||
"""
|
||||
text = self.text
|
||||
pm = ParenMatch(self.editwin)
|
||||
|
||||
text.insert('insert', '# this is a commen)')
|
||||
self.assertIsNone(pm.paren_closed_event('event'))
|
||||
|
||||
text.insert('insert', '\ndef')
|
||||
self.assertIsNone(pm.flash_paren_event('event'))
|
||||
self.assertIsNone(pm.paren_closed_event('event'))
|
||||
|
||||
text.insert('insert', ' a, *arg)')
|
||||
self.assertIsNone(pm.paren_closed_event('event'))
|
||||
|
||||
def test_handle_restore_timer(self):
|
||||
pm = ParenMatch(self.editwin)
|
||||
pm.restore_event = Mock()
|
||||
pm.handle_restore_timer(0)
|
||||
self.assertTrue(pm.restore_event.called)
|
||||
pm.restore_event.reset_mock()
|
||||
pm.handle_restore_timer(1)
|
||||
self.assertFalse(pm.restore_event.called)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main(verbosity=2)
|
12
Lib/os.py
12
Lib/os.py
|
@ -312,11 +312,12 @@ def walk(top, topdown=True, onerror=None, followlinks=False):
|
|||
|
||||
When topdown is true, the caller can modify the dirnames list in-place
|
||||
(e.g., via del or slice assignment), and walk will only recurse into the
|
||||
subdirectories whose names remain in dirnames; this can be used to prune
|
||||
the search, or to impose a specific order of visiting. Modifying
|
||||
dirnames when topdown is false is ineffective, since the directories in
|
||||
dirnames have already been generated by the time dirnames itself is
|
||||
generated.
|
||||
subdirectories whose names remain in dirnames; this can be used to prune the
|
||||
search, or to impose a specific order of visiting. Modifying dirnames when
|
||||
topdown is false is ineffective, since the directories in dirnames have
|
||||
already been generated by the time dirnames itself is generated. No matter
|
||||
the value of topdown, the list of subdirectories is retrieved before the
|
||||
tuples for the directory and its subdirectories are generated.
|
||||
|
||||
By default errors from the os.listdir() call are ignored. If
|
||||
optional arg 'onerror' is specified, it should be a function; it
|
||||
|
@ -344,6 +345,7 @@ def walk(top, topdown=True, onerror=None, followlinks=False):
|
|||
print("bytes in", len(files), "non-directory files")
|
||||
if 'CVS' in dirs:
|
||||
dirs.remove('CVS') # don't visit CVS directories
|
||||
|
||||
"""
|
||||
|
||||
islink, join, isdir = path.islink, path.join, path.isdir
|
||||
|
|
|
@ -48,7 +48,6 @@ def _get_sep(path):
def normcase(s):
"""Normalize case of pathname. Has no effect under Posix"""
# TODO: on Mac OS X, this should really return s.lower().
if not isinstance(s, (bytes, str)):
raise TypeError("normcase() argument must be str or bytes, "
"not '{}'".format(s.__class__.__name__))
@ -539,35 +539,39 @@ class ForkingMixIn:
def collect_children(self):
"""Internal routine to wait for children that have exited."""
if self.active_children is None: return
while len(self.active_children) >= self.max_children:
# XXX: This will wait for any child process, not just ones
# spawned by this library. This could confuse other
# libraries that expect to be able to wait for their own
# children.
try:
pid, status = os.waitpid(0, 0)
except OSError:
pid = None
if pid not in self.active_children: continue
self.active_children.remove(pid)
if self.active_children is None:
return

# XXX: This loop runs more system calls than it ought
# to. There should be a way to put the active_children into a
# process group and then use os.waitpid(-pgid) to wait for any
# of that set, but I couldn't find a way to allocate pgids
# that couldn't collide.
for child in self.active_children:
# If we're above the max number of children, wait and reap them until
# we go back below threshold. Note that we use waitpid(-1) below to be
# able to collect children in size(<defunct children>) syscalls instead
# of size(<children>): the downside is that this might reap children
# which we didn't spawn, which is why we only resort to this when we're
# above max_children.
while len(self.active_children) >= self.max_children:
try:
pid, status = os.waitpid(child, os.WNOHANG)
pid, _ = os.waitpid(-1, 0)
self.active_children.discard(pid)
except InterruptedError:
pass
except ChildProcessError:
# we don't have any children, we're done
self.active_children.clear()
except OSError:
pid = None
if not pid: continue
break

# Now reap all defunct children.
for pid in self.active_children.copy():
try:
self.active_children.remove(pid)
except ValueError as e:
raise ValueError('%s. x=%d and list=%r' % (e.message, pid,
self.active_children))
pid, _ = os.waitpid(pid, os.WNOHANG)
# if the child hasn't exited yet, pid will be 0 and ignored by
# discard() below
self.active_children.discard(pid)
except ChildProcessError:
# someone else reaped it
self.active_children.discard(pid)
except OSError:
pass

def handle_timeout(self):
"""Wait for zombies after self.timeout seconds of inactivity.

@ -589,8 +593,8 @@ class ForkingMixIn:
if pid:
# Parent process
if self.active_children is None:
self.active_children = []
self.active_children.append(pid)
self.active_children = set()
self.active_children.add(pid)
self.close_request(request)
return
else:
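For readers following the collect_children() rewrite, here is a standalone sketch of the reaping pattern it adopts; the helper name is invented for illustration and this is not the socketserver code itself:

import os

def reap_exited_children(active_children):
    # Mirror of the pattern above: waitpid(pid, WNOHANG) returns (0, 0)
    # while the child is still running, and set.discard(0) is a no-op,
    # so only children that actually exited are removed from the set.
    for pid in active_children.copy():
        try:
            pid, _ = os.waitpid(pid, os.WNOHANG)
            active_children.discard(pid)
        except ChildProcessError:
            # someone else already reaped it
            active_children.discard(pid)
        except OSError:
            pass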
Lib/stat.py

@ -148,6 +148,29 @@ def filemode(mode):
perm.append("-")
return "".join(perm)


# Windows FILE_ATTRIBUTE constants for interpreting os.stat()'s
# "st_file_attributes" member

FILE_ATTRIBUTE_ARCHIVE = 32
FILE_ATTRIBUTE_COMPRESSED = 2048
FILE_ATTRIBUTE_DEVICE = 64
FILE_ATTRIBUTE_DIRECTORY = 16
FILE_ATTRIBUTE_ENCRYPTED = 16384
FILE_ATTRIBUTE_HIDDEN = 2
FILE_ATTRIBUTE_INTEGRITY_STREAM = 32768
FILE_ATTRIBUTE_NORMAL = 128
FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 8192
FILE_ATTRIBUTE_NO_SCRUB_DATA = 131072
FILE_ATTRIBUTE_OFFLINE = 4096
FILE_ATTRIBUTE_READONLY = 1
FILE_ATTRIBUTE_REPARSE_POINT = 1024
FILE_ATTRIBUTE_SPARSE_FILE = 512
FILE_ATTRIBUTE_SYSTEM = 4
FILE_ATTRIBUTE_TEMPORARY = 256
FILE_ATTRIBUTE_VIRTUAL = 65536


# If available, use C implementation
try:
from _stat import *
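The new constants line up with os.stat()'s st_file_attributes member on Windows; a minimal sketch, assuming a Windows host and using an illustrative path:

import os
import stat
import sys

# Windows-only sketch: st_file_attributes exposes the raw FILE_ATTRIBUTE_*
# bits, so the new constants can be checked with a bitwise AND.
if sys.platform == 'win32':
    st = os.stat('C:\\Windows')
    print(bool(st.st_file_attributes & stat.FILE_ATTRIBUTE_DIRECTORY))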
@ -155,8 +155,8 @@ def make_zip_pkg(zip_dir, zip_basename, pkg_name, script_basename,
script_name = make_script(zip_dir, script_basename, source)
unlink.append(script_name)
if compiled:
init_name = py_compile(init_name, doraise=True)
script_name = py_compile(script_name, doraise=True)
init_name = py_compile.compile(init_name, doraise=True)
script_name = py_compile.compile(script_name, doraise=True)
unlink.extend((init_name, script_name))
pkg_names = [os.sep.join([pkg_name]*i) for i in range(1, depth+1)]
script_name_in_zip = os.path.join(pkg_names[-1], os.path.basename(script_name))
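The fix above replaces the py_compile module object with its compile() function; a small usage sketch with a hypothetical file name:

import py_compile

# py_compile is a module, not a callable; the function is py_compile.compile().
# doraise=True turns compilation problems into PyCompileError exceptions.
cached_path = py_compile.compile('example.py', doraise=True)
print(cached_path)  # path of the generated .pyc file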
|
@ -19,12 +19,12 @@ MOCK_ANY = mock.ANY
|
|||
PY34 = sys.version_info >= (3, 4)
|
||||
|
||||
|
||||
class BaseEventLoopTests(unittest.TestCase):
|
||||
class BaseEventLoopTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = base_events.BaseEventLoop()
|
||||
self.loop._selector = mock.Mock()
|
||||
asyncio.set_event_loop(None)
|
||||
self.set_event_loop(self.loop)
|
||||
|
||||
def test_not_implemented(self):
|
||||
m = mock.Mock()
|
||||
|
@ -240,30 +240,23 @@ class BaseEventLoopTests(unittest.TestCase):
|
|||
self.loop.set_debug(False)
|
||||
self.assertFalse(self.loop.get_debug())
|
||||
|
||||
@mock.patch('asyncio.base_events.time')
|
||||
@mock.patch('asyncio.base_events.logger')
|
||||
def test__run_once_logging(self, m_logger, m_time):
|
||||
def test__run_once_logging(self, m_logger):
|
||||
def slow_select(timeout):
|
||||
time.sleep(1.0)
|
||||
return []
|
||||
|
||||
# Log to INFO level if timeout > 1.0 sec.
|
||||
idx = -1
|
||||
data = [10.0, 10.0, 12.0, 13.0]
|
||||
|
||||
def monotonic():
|
||||
nonlocal data, idx
|
||||
idx += 1
|
||||
return data[idx]
|
||||
|
||||
m_time.monotonic = monotonic
|
||||
|
||||
self.loop._scheduled.append(
|
||||
asyncio.TimerHandle(11.0, lambda: True, (), self.loop))
|
||||
self.loop._selector.select = slow_select
|
||||
self.loop._process_events = mock.Mock()
|
||||
self.loop._run_once()
|
||||
self.assertEqual(logging.INFO, m_logger.log.call_args[0][0])
|
||||
|
||||
idx = -1
|
||||
data = [10.0, 10.0, 10.3, 13.0]
|
||||
self.loop._scheduled = [asyncio.TimerHandle(11.0, lambda: True, (),
|
||||
self.loop)]
|
||||
def fast_select(timeout):
|
||||
time.sleep(0.001)
|
||||
return []
|
||||
|
||||
self.loop._selector.select = fast_select
|
||||
self.loop._run_once()
|
||||
self.assertEqual(logging.DEBUG, m_logger.log.call_args[0][0])
|
||||
|
||||
|
@ -555,14 +548,11 @@ class MyDatagramProto(asyncio.DatagramProtocol):
|
|||
self.done.set_result(None)
|
||||
|
||||
|
||||
class BaseEventLoopWithSelectorTests(unittest.TestCase):
|
||||
class BaseEventLoopWithSelectorTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def tearDown(self):
|
||||
self.loop.close()
|
||||
self.set_event_loop(self.loop)
|
||||
|
||||
@mock.patch('asyncio.base_events.socket')
|
||||
def test_create_connection_multiple_errors(self, m_socket):
|
||||
|
@ -979,6 +969,34 @@ class BaseEventLoopWithSelectorTests(unittest.TestCase):
|
|||
with self.assertRaises(TypeError):
|
||||
self.loop.run_in_executor(None, coroutine_function)
|
||||
|
||||
@mock.patch('asyncio.base_events.logger')
|
||||
def test_log_slow_callbacks(self, m_logger):
|
||||
def stop_loop_cb(loop):
|
||||
loop.stop()
|
||||
|
||||
@asyncio.coroutine
|
||||
def stop_loop_coro(loop):
|
||||
yield from ()
|
||||
loop.stop()
|
||||
|
||||
asyncio.set_event_loop(self.loop)
|
||||
self.loop.set_debug(True)
|
||||
self.loop.slow_callback_duration = 0.0
|
||||
|
||||
# slow callback
|
||||
self.loop.call_soon(stop_loop_cb, self.loop)
|
||||
self.loop.run_forever()
|
||||
fmt, *args = m_logger.warning.call_args[0]
|
||||
self.assertRegex(fmt % tuple(args),
|
||||
"^Executing Handle.*stop_loop_cb.* took .* seconds$")
|
||||
|
||||
# slow task
|
||||
asyncio.async(stop_loop_coro(self.loop), loop=self.loop)
|
||||
self.loop.run_forever()
|
||||
fmt, *args = m_logger.warning.call_args[0]
|
||||
self.assertRegex(fmt % tuple(args),
|
||||
"^Executing Task.*stop_loop_coro.* took .* seconds$")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
|
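The slow-callback logging that test_log_slow_callbacks exercises can be reproduced outside the test suite; a sketch assuming the 3.4-era asyncio API shown above:

import asyncio
import logging
import time

logging.basicConfig(level=logging.WARNING)

loop = asyncio.new_event_loop()
loop.set_debug(True)                 # enable debug mode (see set_debug above)
loop.slow_callback_duration = 0.05   # report callbacks slower than 50 ms

def blocking_callback():
    time.sleep(0.2)                  # long enough to trigger the warning
    loop.stop()

loop.call_soon(blocking_callback)
loop.run_forever()                   # logs an "Executing ... took ... seconds" warning
loop.close()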
|
@ -224,7 +224,7 @@ class EventLoopTestsMixin:
|
|||
def setUp(self):
|
||||
super().setUp()
|
||||
self.loop = self.create_event_loop()
|
||||
asyncio.set_event_loop(None)
|
||||
self.set_event_loop(self.loop)
|
||||
|
||||
def tearDown(self):
|
||||
# just in case if we have transport close callbacks
|
||||
|
@ -1629,14 +1629,14 @@ class SubprocessTestsMixin:
|
|||
|
||||
if sys.platform == 'win32':
|
||||
|
||||
class SelectEventLoopTests(EventLoopTestsMixin, unittest.TestCase):
|
||||
class SelectEventLoopTests(EventLoopTestsMixin, test_utils.TestCase):
|
||||
|
||||
def create_event_loop(self):
|
||||
return asyncio.SelectorEventLoop()
|
||||
|
||||
class ProactorEventLoopTests(EventLoopTestsMixin,
|
||||
SubprocessTestsMixin,
|
||||
unittest.TestCase):
|
||||
test_utils.TestCase):
|
||||
|
||||
def create_event_loop(self):
|
||||
return asyncio.ProactorEventLoop()
|
||||
|
@ -1691,7 +1691,7 @@ else:
|
|||
if hasattr(selectors, 'KqueueSelector'):
|
||||
class KqueueEventLoopTests(UnixEventLoopTestsMixin,
|
||||
SubprocessTestsMixin,
|
||||
unittest.TestCase):
|
||||
test_utils.TestCase):
|
||||
|
||||
def create_event_loop(self):
|
||||
return asyncio.SelectorEventLoop(
|
||||
|
@ -1716,7 +1716,7 @@ else:
|
|||
if hasattr(selectors, 'EpollSelector'):
|
||||
class EPollEventLoopTests(UnixEventLoopTestsMixin,
|
||||
SubprocessTestsMixin,
|
||||
unittest.TestCase):
|
||||
test_utils.TestCase):
|
||||
|
||||
def create_event_loop(self):
|
||||
return asyncio.SelectorEventLoop(selectors.EpollSelector())
|
||||
|
@ -1724,7 +1724,7 @@ else:
|
|||
if hasattr(selectors, 'PollSelector'):
|
||||
class PollEventLoopTests(UnixEventLoopTestsMixin,
|
||||
SubprocessTestsMixin,
|
||||
unittest.TestCase):
|
||||
test_utils.TestCase):
|
||||
|
||||
def create_event_loop(self):
|
||||
return asyncio.SelectorEventLoop(selectors.PollSelector())
|
||||
|
@ -1732,7 +1732,7 @@ else:
|
|||
# Should always exist.
|
||||
class SelectEventLoopTests(UnixEventLoopTestsMixin,
|
||||
SubprocessTestsMixin,
|
||||
unittest.TestCase):
|
||||
test_utils.TestCase):
|
||||
|
||||
def create_event_loop(self):
|
||||
return asyncio.SelectorEventLoop(selectors.SelectSelector())
|
||||
|
|
|
@ -13,14 +13,10 @@ def _fakefunc(f):
|
|||
return f
|
||||
|
||||
|
||||
class FutureTests(unittest.TestCase):
|
||||
class FutureTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def tearDown(self):
|
||||
self.loop.close()
|
||||
self.loop = self.new_test_loop()
|
||||
|
||||
def test_initial_state(self):
|
||||
f = asyncio.Future(loop=self.loop)
|
||||
|
@ -30,12 +26,9 @@ class FutureTests(unittest.TestCase):
|
|||
self.assertTrue(f.cancelled())
|
||||
|
||||
def test_init_constructor_default_loop(self):
|
||||
try:
|
||||
asyncio.set_event_loop(self.loop)
|
||||
f = asyncio.Future()
|
||||
self.assertIs(f._loop, self.loop)
|
||||
finally:
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def test_constructor_positional(self):
|
||||
# Make sure Future doesn't accept a positional argument
|
||||
|
@ -264,14 +257,10 @@ class FutureTests(unittest.TestCase):
|
|||
self.assertTrue(f2.cancelled())
|
||||
|
||||
|
||||
class FutureDoneCallbackTests(unittest.TestCase):
|
||||
class FutureDoneCallbackTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def tearDown(self):
|
||||
self.loop.close()
|
||||
self.loop = self.new_test_loop()
|
||||
|
||||
def run_briefly(self):
|
||||
test_utils.run_briefly(self.loop)
|
||||
|
|
|
@ -17,14 +17,10 @@ STR_RGX_REPR = (
|
|||
RGX_REPR = re.compile(STR_RGX_REPR)
|
||||
|
||||
|
||||
class LockTests(unittest.TestCase):
|
||||
class LockTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def tearDown(self):
|
||||
self.loop.close()
|
||||
self.loop = self.new_test_loop()
|
||||
|
||||
def test_ctor_loop(self):
|
||||
loop = mock.Mock()
|
||||
|
@ -35,12 +31,9 @@ class LockTests(unittest.TestCase):
|
|||
self.assertIs(lock._loop, self.loop)
|
||||
|
||||
def test_ctor_noloop(self):
|
||||
try:
|
||||
asyncio.set_event_loop(self.loop)
|
||||
lock = asyncio.Lock()
|
||||
self.assertIs(lock._loop, self.loop)
|
||||
finally:
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def test_repr(self):
|
||||
lock = asyncio.Lock(loop=self.loop)
|
||||
|
@ -240,14 +233,10 @@ class LockTests(unittest.TestCase):
|
|||
self.assertFalse(lock.locked())
|
||||
|
||||
|
||||
class EventTests(unittest.TestCase):
|
||||
class EventTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def tearDown(self):
|
||||
self.loop.close()
|
||||
self.loop = self.new_test_loop()
|
||||
|
||||
def test_ctor_loop(self):
|
||||
loop = mock.Mock()
|
||||
|
@ -258,12 +247,9 @@ class EventTests(unittest.TestCase):
|
|||
self.assertIs(ev._loop, self.loop)
|
||||
|
||||
def test_ctor_noloop(self):
|
||||
try:
|
||||
asyncio.set_event_loop(self.loop)
|
||||
ev = asyncio.Event()
|
||||
self.assertIs(ev._loop, self.loop)
|
||||
finally:
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def test_repr(self):
|
||||
ev = asyncio.Event(loop=self.loop)
|
||||
|
@ -376,14 +362,10 @@ class EventTests(unittest.TestCase):
|
|||
self.assertTrue(t.result())
|
||||
|
||||
|
||||
class ConditionTests(unittest.TestCase):
|
||||
class ConditionTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def tearDown(self):
|
||||
self.loop.close()
|
||||
self.loop = self.new_test_loop()
|
||||
|
||||
def test_ctor_loop(self):
|
||||
loop = mock.Mock()
|
||||
|
@ -394,12 +376,9 @@ class ConditionTests(unittest.TestCase):
|
|||
self.assertIs(cond._loop, self.loop)
|
||||
|
||||
def test_ctor_noloop(self):
|
||||
try:
|
||||
asyncio.set_event_loop(self.loop)
|
||||
cond = asyncio.Condition()
|
||||
self.assertIs(cond._loop, self.loop)
|
||||
finally:
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def test_wait(self):
|
||||
cond = asyncio.Condition(loop=self.loop)
|
||||
|
@ -678,14 +657,10 @@ class ConditionTests(unittest.TestCase):
|
|||
self.assertFalse(cond.locked())
|
||||
|
||||
|
||||
class SemaphoreTests(unittest.TestCase):
|
||||
class SemaphoreTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def tearDown(self):
|
||||
self.loop.close()
|
||||
self.loop = self.new_test_loop()
|
||||
|
||||
def test_ctor_loop(self):
|
||||
loop = mock.Mock()
|
||||
|
@ -696,12 +671,9 @@ class SemaphoreTests(unittest.TestCase):
|
|||
self.assertIs(sem._loop, self.loop)
|
||||
|
||||
def test_ctor_noloop(self):
|
||||
try:
|
||||
asyncio.set_event_loop(self.loop)
|
||||
sem = asyncio.Semaphore()
|
||||
self.assertIs(sem._loop, self.loop)
|
||||
finally:
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def test_initial_value_zero(self):
|
||||
sem = asyncio.Semaphore(0, loop=self.loop)
|
||||
|
|
|
@ -12,10 +12,10 @@ from asyncio.proactor_events import _ProactorDuplexPipeTransport
|
|||
from asyncio import test_utils
|
||||
|
||||
|
||||
class ProactorSocketTransportTests(unittest.TestCase):
|
||||
class ProactorSocketTransportTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
self.loop = self.new_test_loop()
|
||||
self.proactor = mock.Mock()
|
||||
self.loop._proactor = self.proactor
|
||||
self.protocol = test_utils.make_test_protocol(asyncio.Protocol)
|
||||
|
@ -343,7 +343,7 @@ class ProactorSocketTransportTests(unittest.TestCase):
|
|||
tr.close()
|
||||
|
||||
|
||||
class BaseProactorEventLoopTests(unittest.TestCase):
|
||||
class BaseProactorEventLoopTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.sock = mock.Mock(socket.socket)
|
||||
|
@ -356,6 +356,7 @@ class BaseProactorEventLoopTests(unittest.TestCase):
|
|||
return (self.ssock, self.csock)
|
||||
|
||||
self.loop = EventLoop(self.proactor)
|
||||
self.set_event_loop(self.loop, cleanup=False)
|
||||
|
||||
@mock.patch.object(BaseProactorEventLoop, 'call_soon')
|
||||
@mock.patch.object(BaseProactorEventLoop, '_socketpair')
|
||||
|
|
|
@ -7,14 +7,10 @@ import asyncio
|
|||
from asyncio import test_utils
|
||||
|
||||
|
||||
class _QueueTestBase(unittest.TestCase):
|
||||
class _QueueTestBase(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def tearDown(self):
|
||||
self.loop.close()
|
||||
self.loop = self.new_test_loop()
|
||||
|
||||
|
||||
class QueueBasicTests(_QueueTestBase):
|
||||
|
@ -32,8 +28,7 @@ class QueueBasicTests(_QueueTestBase):
|
|||
self.assertAlmostEqual(0.2, when)
|
||||
yield 0.1
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
q = asyncio.Queue(loop=loop)
|
||||
self.assertTrue(fn(q).startswith('<Queue'), fn(q))
|
||||
|
@ -80,12 +75,9 @@ class QueueBasicTests(_QueueTestBase):
|
|||
self.assertIs(q._loop, self.loop)
|
||||
|
||||
def test_ctor_noloop(self):
|
||||
try:
|
||||
asyncio.set_event_loop(self.loop)
|
||||
q = asyncio.Queue()
|
||||
self.assertIs(q._loop, self.loop)
|
||||
finally:
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def test_repr(self):
|
||||
self._test_repr_or_str(repr, True)
|
||||
|
@ -126,8 +118,7 @@ class QueueBasicTests(_QueueTestBase):
|
|||
self.assertAlmostEqual(0.02, when)
|
||||
yield 0.01
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
q = asyncio.Queue(maxsize=2, loop=loop)
|
||||
self.assertEqual(2, q.maxsize)
|
||||
|
@ -194,8 +185,7 @@ class QueueGetTests(_QueueTestBase):
|
|||
self.assertAlmostEqual(0.01, when)
|
||||
yield 0.01
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
q = asyncio.Queue(loop=loop)
|
||||
started = asyncio.Event(loop=loop)
|
||||
|
@ -241,8 +231,7 @@ class QueueGetTests(_QueueTestBase):
|
|||
self.assertAlmostEqual(0.061, when)
|
||||
yield 0.05
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
q = asyncio.Queue(loop=loop)
|
||||
|
||||
|
@ -302,8 +291,7 @@ class QueuePutTests(_QueueTestBase):
|
|||
self.assertAlmostEqual(0.01, when)
|
||||
yield 0.01
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
q = asyncio.Queue(maxsize=1, loop=loop)
|
||||
started = asyncio.Event(loop=loop)
|
||||
|
@ -339,6 +327,21 @@ class QueuePutTests(_QueueTestBase):
|
|||
q.put_nowait(1)
|
||||
self.assertRaises(asyncio.QueueFull, q.put_nowait, 2)
|
||||
|
||||
def test_float_maxsize(self):
|
||||
q = asyncio.Queue(maxsize=1.3, loop=self.loop)
|
||||
q.put_nowait(1)
|
||||
q.put_nowait(2)
|
||||
self.assertTrue(q.full())
|
||||
self.assertRaises(asyncio.QueueFull, q.put_nowait, 3)
|
||||
|
||||
q = asyncio.Queue(maxsize=1.3, loop=self.loop)
|
||||
@asyncio.coroutine
|
||||
def queue_put():
|
||||
yield from q.put(1)
|
||||
yield from q.put(2)
|
||||
self.assertTrue(q.full())
|
||||
self.loop.run_until_complete(queue_put())
|
||||
|
||||
def test_put_cancelled(self):
|
||||
q = asyncio.Queue(loop=self.loop)
|
||||
|
||||
|
|
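What test_float_maxsize pins down, in isolation; a sketch assuming the 3.4-era Queue(loop=...) signature used in these tests:

import asyncio

# A non-integer maxsize is compared with qsize() >= maxsize, so maxsize=1.3
# admits two items before the queue reports full().
loop = asyncio.new_event_loop()
q = asyncio.Queue(maxsize=1.3, loop=loop)
q.put_nowait(1)
q.put_nowait(2)
print(q.full())   # True; a third put_nowait() would raise QueueFull
loop.close()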
|
@ -37,11 +37,12 @@ def list_to_buffer(l=()):
|
|||
return bytearray().join(l)
|
||||
|
||||
|
||||
class BaseSelectorEventLoopTests(unittest.TestCase):
|
||||
class BaseSelectorEventLoopTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
selector = mock.Mock()
|
||||
self.loop = TestBaseSelectorEventLoop(selector)
|
||||
self.set_event_loop(self.loop, cleanup=False)
|
||||
|
||||
def test_make_socket_transport(self):
|
||||
m = mock.Mock()
|
||||
|
@ -107,10 +108,7 @@ class BaseSelectorEventLoopTests(unittest.TestCase):
|
|||
self.assertRaises(RuntimeError, self.loop.add_writer, fd, callback)
|
||||
|
||||
def test_close_no_selector(self):
|
||||
ssock = self.loop._ssock
|
||||
csock = self.loop._csock
|
||||
remove_reader = self.loop.remove_reader = mock.Mock()
|
||||
|
||||
self.loop.remove_reader = mock.Mock()
|
||||
self.loop._selector.close()
|
||||
self.loop._selector = None
|
||||
self.loop.close()
|
||||
|
@ -597,10 +595,10 @@ class BaseSelectorEventLoopTests(unittest.TestCase):
|
|||
self.loop.remove_writer.assert_called_with(1)
|
||||
|
||||
|
||||
class SelectorTransportTests(unittest.TestCase):
|
||||
class SelectorTransportTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
self.loop = self.new_test_loop()
|
||||
self.protocol = test_utils.make_test_protocol(asyncio.Protocol)
|
||||
self.sock = mock.Mock(socket.socket)
|
||||
self.sock.fileno.return_value = 7
|
||||
|
@ -684,14 +682,14 @@ class SelectorTransportTests(unittest.TestCase):
|
|||
self.assertEqual(2, sys.getrefcount(self.protocol),
|
||||
pprint.pformat(gc.get_referrers(self.protocol)))
|
||||
self.assertIsNone(tr._loop)
|
||||
self.assertEqual(2, sys.getrefcount(self.loop),
|
||||
self.assertEqual(3, sys.getrefcount(self.loop),
|
||||
pprint.pformat(gc.get_referrers(self.loop)))
|
||||
|
||||
|
||||
class SelectorSocketTransportTests(unittest.TestCase):
|
||||
class SelectorSocketTransportTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
self.loop = self.new_test_loop()
|
||||
self.protocol = test_utils.make_test_protocol(asyncio.Protocol)
|
||||
self.sock = mock.Mock(socket.socket)
|
||||
self.sock_fd = self.sock.fileno.return_value = 7
|
||||
|
@ -1061,10 +1059,10 @@ class SelectorSocketTransportTests(unittest.TestCase):
|
|||
|
||||
|
||||
@unittest.skipIf(ssl is None, 'No ssl module')
|
||||
class SelectorSslTransportTests(unittest.TestCase):
|
||||
class SelectorSslTransportTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
self.loop = self.new_test_loop()
|
||||
self.protocol = test_utils.make_test_protocol(asyncio.Protocol)
|
||||
self.sock = mock.Mock(socket.socket)
|
||||
self.sock.fileno.return_value = 7
|
||||
|
@ -1396,10 +1394,10 @@ class SelectorSslWithoutSslTransportTests(unittest.TestCase):
|
|||
_SelectorSslTransport(Mock(), Mock(), Mock(), Mock())
|
||||
|
||||
|
||||
class SelectorDatagramTransportTests(unittest.TestCase):
|
||||
class SelectorDatagramTransportTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
self.loop = self.new_test_loop()
|
||||
self.protocol = test_utils.make_test_protocol(asyncio.DatagramProtocol)
|
||||
self.sock = mock.Mock(spec_set=socket.socket)
|
||||
self.sock.fileno.return_value = 7
|
||||
|
|
|
@ -15,13 +15,13 @@ import asyncio
|
|||
from asyncio import test_utils
|
||||
|
||||
|
||||
class StreamReaderTests(unittest.TestCase):
|
||||
class StreamReaderTests(test_utils.TestCase):
|
||||
|
||||
DATA = b'line1\nline2\nline3\n'
|
||||
|
||||
def setUp(self):
|
||||
self.loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(None)
|
||||
self.set_event_loop(self.loop)
|
||||
|
||||
def tearDown(self):
|
||||
# just in case if we have transport close callbacks
|
||||
|
@ -29,6 +29,7 @@ class StreamReaderTests(unittest.TestCase):
|
|||
|
||||
self.loop.close()
|
||||
gc.collect()
|
||||
super().tearDown()
|
||||
|
||||
@mock.patch('asyncio.streams.events')
|
||||
def test_ctor_global_loop(self, m_events):
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
from asyncio import subprocess
|
||||
from asyncio import test_utils
|
||||
import asyncio
|
||||
import signal
|
||||
import sys
|
||||
|
@ -151,21 +152,21 @@ if sys.platform != 'win32':
|
|||
policy = asyncio.get_event_loop_policy()
|
||||
policy.set_child_watcher(None)
|
||||
self.loop.close()
|
||||
policy.set_event_loop(None)
|
||||
super().tearDown()
|
||||
|
||||
class SubprocessSafeWatcherTests(SubprocessWatcherMixin,
|
||||
unittest.TestCase):
|
||||
test_utils.TestCase):
|
||||
|
||||
Watcher = unix_events.SafeChildWatcher
|
||||
|
||||
class SubprocessFastWatcherTests(SubprocessWatcherMixin,
|
||||
unittest.TestCase):
|
||||
test_utils.TestCase):
|
||||
|
||||
Watcher = unix_events.FastChildWatcher
|
||||
|
||||
else:
|
||||
# Windows
|
||||
class SubprocessProactorTests(SubprocessMixin, unittest.TestCase):
|
||||
class SubprocessProactorTests(SubprocessMixin, test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
policy = asyncio.get_event_loop_policy()
|
||||
|
@ -178,6 +179,7 @@ else:
|
|||
policy = asyncio.get_event_loop_policy()
|
||||
self.loop.close()
|
||||
policy.set_event_loop(None)
|
||||
super().tearDown()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -1,16 +1,20 @@
|
|||
"""Tests for tasks.py."""
|
||||
|
||||
import gc
|
||||
import os.path
|
||||
import sys
|
||||
import types
|
||||
import unittest
|
||||
import weakref
|
||||
from test.script_helper import assert_python_ok
|
||||
|
||||
import asyncio
|
||||
from asyncio import tasks
|
||||
from asyncio import test_utils
|
||||
|
||||
|
||||
PY35 = (sys.version_info >= (3, 5))
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def coroutine_function():
|
||||
pass
|
||||
|
@ -25,15 +29,10 @@ class Dummy:
|
|||
pass
|
||||
|
||||
|
||||
class TaskTests(unittest.TestCase):
|
||||
class TaskTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def tearDown(self):
|
||||
self.loop.close()
|
||||
gc.collect()
|
||||
self.loop = self.new_test_loop()
|
||||
|
||||
def test_task_class(self):
|
||||
@asyncio.coroutine
|
||||
|
@ -46,6 +45,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertIs(t._loop, self.loop)
|
||||
|
||||
loop = asyncio.new_event_loop()
|
||||
self.set_event_loop(loop)
|
||||
t = asyncio.Task(notmuch(), loop=loop)
|
||||
self.assertIs(t._loop, loop)
|
||||
loop.close()
|
||||
|
@ -61,6 +61,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertIs(t._loop, self.loop)
|
||||
|
||||
loop = asyncio.new_event_loop()
|
||||
self.set_event_loop(loop)
|
||||
t = asyncio.async(notmuch(), loop=loop)
|
||||
self.assertIs(t._loop, loop)
|
||||
loop.close()
|
||||
|
@ -76,6 +77,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertIs(f, f_orig)
|
||||
|
||||
loop = asyncio.new_event_loop()
|
||||
self.set_event_loop(loop)
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
f = asyncio.async(f_orig, loop=loop)
|
||||
|
@ -97,6 +99,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertIs(t, t_orig)
|
||||
|
||||
loop = asyncio.new_event_loop()
|
||||
self.set_event_loop(loop)
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
t = asyncio.async(t_orig, loop=loop)
|
||||
|
@ -116,10 +119,22 @@ class TaskTests(unittest.TestCase):
|
|||
yield from []
|
||||
return 'abc'
|
||||
|
||||
self.assertEqual(notmuch.__name__, 'notmuch')
|
||||
if PY35:
|
||||
self.assertEqual(notmuch.__qualname__,
|
||||
'TaskTests.test_task_repr.<locals>.notmuch')
|
||||
self.assertEqual(notmuch.__module__, __name__)
|
||||
|
||||
filename, lineno = test_utils.get_function_source(notmuch)
|
||||
src = "%s:%s" % (filename, lineno)
|
||||
|
||||
t = asyncio.Task(notmuch(), loop=self.loop)
|
||||
gen = notmuch()
|
||||
self.assertEqual(gen.__name__, 'notmuch')
|
||||
if PY35:
|
||||
self.assertEqual(gen.__qualname__,
|
||||
'TaskTests.test_task_repr.<locals>.notmuch')
|
||||
|
||||
t = asyncio.Task(gen, loop=self.loop)
|
||||
t.add_done_callback(Dummy())
|
||||
self.assertEqual(repr(t),
|
||||
'Task(<notmuch at %s>)<PENDING, [Dummy()]>' % src)
|
||||
|
@ -142,6 +157,12 @@ class TaskTests(unittest.TestCase):
|
|||
def notmuch():
|
||||
pass
|
||||
|
||||
self.assertEqual(notmuch.__name__, 'notmuch')
|
||||
self.assertEqual(notmuch.__module__, __name__)
|
||||
if PY35:
|
||||
self.assertEqual(notmuch.__qualname__,
|
||||
'TaskTests.test_task_repr_custom.<locals>.notmuch')
|
||||
|
||||
class T(asyncio.Future):
|
||||
def __repr__(self):
|
||||
return 'T[]'
|
||||
|
@ -151,13 +172,26 @@ class TaskTests(unittest.TestCase):
|
|||
return super().__repr__()
|
||||
|
||||
gen = notmuch()
|
||||
if PY35 or tasks._DEBUG:
|
||||
# On Python >= 3.5, generators now inherit the name of the
|
||||
# function, as expected, and have a qualified name (__qualname__
|
||||
# attribute). In debug mode, @coroutine decorator uses CoroWrapper
|
||||
# which gets its name (__name__ attribute) from the wrapped
|
||||
# coroutine function.
|
||||
coro_name = 'notmuch'
|
||||
else:
|
||||
# On Python < 3.5, generators inherit the name of the code, not of
|
||||
# the function. See: http://bugs.python.org/issue21205
|
||||
coro_name = 'coro'
|
||||
self.assertEqual(gen.__name__, coro_name)
|
||||
if PY35:
|
||||
self.assertEqual(gen.__qualname__,
|
||||
'TaskTests.test_task_repr_custom.<locals>.notmuch')
|
||||
|
||||
t = MyTask(gen, loop=self.loop)
|
||||
filename = gen.gi_code.co_filename
|
||||
lineno = gen.gi_frame.f_lineno
|
||||
# FIXME: check for the name "coro" instead of "notmuch" because
|
||||
# @asyncio.coroutine drops the name of the wrapped function:
|
||||
# http://bugs.python.org/issue21205
|
||||
self.assertEqual(repr(t), 'T[](<coro at %s:%s>)' % (filename, lineno))
|
||||
self.assertEqual(repr(t), 'T[](<%s at %s:%s>)' % (coro_name, filename, lineno))
|
||||
|
||||
def test_task_basics(self):
|
||||
@asyncio.coroutine
|
||||
|
@ -184,8 +218,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(10.0, when)
|
||||
yield 0
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
@asyncio.coroutine
|
||||
def task():
|
||||
|
@ -310,7 +343,7 @@ class TaskTests(unittest.TestCase):
|
|||
|
||||
def test_cancel_current_task(self):
|
||||
loop = asyncio.new_event_loop()
|
||||
self.addCleanup(loop.close)
|
||||
self.set_event_loop(loop)
|
||||
|
||||
@asyncio.coroutine
|
||||
def task():
|
||||
|
@ -338,8 +371,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(0.3, when)
|
||||
yield 0.1
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
x = 0
|
||||
waiters = []
|
||||
|
@ -374,8 +406,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(0.1, when)
|
||||
when = yield 0.1
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
foo_running = None
|
||||
|
||||
|
@ -400,8 +431,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertEqual(foo_running, False)
|
||||
|
||||
def test_wait_for_blocking(self):
|
||||
loop = test_utils.TestLoop()
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop()
|
||||
|
||||
@asyncio.coroutine
|
||||
def coro():
|
||||
|
@ -421,8 +451,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(0.01, when)
|
||||
yield 0.01
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
@asyncio.coroutine
|
||||
def foo():
|
||||
|
@ -450,8 +479,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(0.15, when)
|
||||
yield 0.15
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
a = asyncio.Task(asyncio.sleep(0.1, loop=loop), loop=loop)
|
||||
b = asyncio.Task(asyncio.sleep(0.15, loop=loop), loop=loop)
|
||||
|
@ -481,8 +509,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(0.015, when)
|
||||
yield 0.015
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
a = asyncio.Task(asyncio.sleep(0.01, loop=loop), loop=loop)
|
||||
b = asyncio.Task(asyncio.sleep(0.015, loop=loop), loop=loop)
|
||||
|
@ -495,11 +522,8 @@ class TaskTests(unittest.TestCase):
|
|||
return 42
|
||||
|
||||
asyncio.set_event_loop(loop)
|
||||
try:
|
||||
res = loop.run_until_complete(
|
||||
asyncio.Task(foo(), loop=loop))
|
||||
finally:
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
self.assertEqual(res, 42)
|
||||
|
||||
|
@ -537,8 +561,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(0.1, when)
|
||||
yield 0.1
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
a = asyncio.Task(asyncio.sleep(10.0, loop=loop), loop=loop)
|
||||
b = asyncio.Task(asyncio.sleep(0.1, loop=loop), loop=loop)
|
||||
|
@ -593,8 +616,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(10.0, when)
|
||||
yield 0
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
# first_exception, task already has exception
|
||||
a = asyncio.Task(asyncio.sleep(10.0, loop=loop), loop=loop)
|
||||
|
@ -627,8 +649,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(0.01, when)
|
||||
yield 0.01
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
# first_exception, exception during waiting
|
||||
a = asyncio.Task(asyncio.sleep(10.0, loop=loop), loop=loop)
|
||||
|
@ -660,8 +681,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(0.15, when)
|
||||
yield 0.15
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
a = asyncio.Task(asyncio.sleep(0.1, loop=loop), loop=loop)
|
||||
|
||||
|
@ -697,8 +717,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(0.11, when)
|
||||
yield 0.11
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
a = asyncio.Task(asyncio.sleep(0.1, loop=loop), loop=loop)
|
||||
b = asyncio.Task(asyncio.sleep(0.15, loop=loop), loop=loop)
|
||||
|
@ -728,8 +747,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(0.1, when)
|
||||
yield 0.1
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
a = asyncio.Task(asyncio.sleep(0.1, loop=loop), loop=loop)
|
||||
b = asyncio.Task(asyncio.sleep(0.15, loop=loop), loop=loop)
|
||||
|
@ -753,8 +771,7 @@ class TaskTests(unittest.TestCase):
|
|||
yield 0.01
|
||||
yield 0
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
completed = set()
|
||||
time_shifted = False
|
||||
|
||||
|
@ -797,8 +814,7 @@ class TaskTests(unittest.TestCase):
|
|||
yield 0
|
||||
yield 0.1
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
a = asyncio.sleep(0.1, 'a', loop=loop)
|
||||
b = asyncio.sleep(0.15, 'b', loop=loop)
|
||||
|
@ -834,8 +850,7 @@ class TaskTests(unittest.TestCase):
|
|||
yield 0
|
||||
yield 0.01
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
a = asyncio.sleep(0.01, 'a', loop=loop)
|
||||
|
||||
|
@ -854,8 +869,7 @@ class TaskTests(unittest.TestCase):
|
|||
yield 0.05
|
||||
yield 0
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
a = asyncio.sleep(0.05, 'a', loop=loop)
|
||||
b = asyncio.sleep(0.10, 'b', loop=loop)
|
||||
|
@ -880,8 +894,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(0.05, when)
|
||||
yield 0.05
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
a = asyncio.sleep(0.05, 'a', loop=loop)
|
||||
b = asyncio.sleep(0.05, 'b', loop=loop)
|
||||
|
@ -922,8 +935,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(0.1, when)
|
||||
yield 0.05
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
@asyncio.coroutine
|
||||
def sleeper(dt, arg):
|
||||
|
@ -944,8 +956,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(10.0, when)
|
||||
yield 0
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
t = asyncio.Task(asyncio.sleep(10.0, 'yeah', loop=loop),
|
||||
loop=loop)
|
||||
|
@ -976,8 +987,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(5000, when)
|
||||
yield 0.1
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
@asyncio.coroutine
|
||||
def sleep(dt):
|
||||
|
@ -1087,8 +1097,7 @@ class TaskTests(unittest.TestCase):
|
|||
self.assertAlmostEqual(10.0, when)
|
||||
yield 0
|
||||
|
||||
loop = test_utils.TestLoop(gen)
|
||||
self.addCleanup(loop.close)
|
||||
loop = self.new_test_loop(gen)
|
||||
|
||||
@asyncio.coroutine
|
||||
def sleeper():
|
||||
|
@ -1500,12 +1509,9 @@ class TaskTests(unittest.TestCase):
|
|||
class GatherTestsBase:
|
||||
|
||||
def setUp(self):
|
||||
self.one_loop = test_utils.TestLoop()
|
||||
self.other_loop = test_utils.TestLoop()
|
||||
|
||||
def tearDown(self):
|
||||
self.one_loop.close()
|
||||
self.other_loop.close()
|
||||
self.one_loop = self.new_test_loop()
|
||||
self.other_loop = self.new_test_loop()
|
||||
self.set_event_loop(self.one_loop, cleanup=False)
|
||||
|
||||
def _run_loop(self, loop):
|
||||
while loop._ready:
|
||||
|
@ -1597,7 +1603,7 @@ class GatherTestsBase:
|
|||
self.assertEqual(stdout.rstrip(), b'False')
|
||||
|
||||
|
||||
class FutureGatherTests(GatherTestsBase, unittest.TestCase):
|
||||
class FutureGatherTests(GatherTestsBase, test_utils.TestCase):
|
||||
|
||||
def wrap_futures(self, *futures):
|
||||
return futures
|
||||
|
@ -1681,16 +1687,12 @@ class FutureGatherTests(GatherTestsBase, unittest.TestCase):
|
|||
cb.assert_called_once_with(fut)
|
||||
|
||||
|
||||
class CoroutineGatherTests(GatherTestsBase, unittest.TestCase):
|
||||
class CoroutineGatherTests(GatherTestsBase, test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
asyncio.set_event_loop(self.one_loop)
|
||||
|
||||
def tearDown(self):
|
||||
asyncio.set_event_loop(None)
|
||||
super().tearDown()
|
||||
|
||||
def wrap_futures(self, *futures):
|
||||
coros = []
|
||||
for fut in futures:
|
||||
|
|
|
@ -29,14 +29,11 @@ MOCK_ANY = mock.ANY
|
|||
|
||||
|
||||
@unittest.skipUnless(signal, 'Signals are not supported')
|
||||
class SelectorEventLoopSignalTests(unittest.TestCase):
|
||||
class SelectorEventLoopSignalTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = asyncio.SelectorEventLoop()
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def tearDown(self):
|
||||
self.loop.close()
|
||||
self.set_event_loop(self.loop)
|
||||
|
||||
def test_check_signal(self):
|
||||
self.assertRaises(
|
||||
|
@ -208,14 +205,11 @@ class SelectorEventLoopSignalTests(unittest.TestCase):
|
|||
|
||||
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'),
|
||||
'UNIX Sockets are not supported')
|
||||
class SelectorEventLoopUnixSocketTests(unittest.TestCase):
|
||||
class SelectorEventLoopUnixSocketTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = asyncio.SelectorEventLoop()
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def tearDown(self):
|
||||
self.loop.close()
|
||||
self.set_event_loop(self.loop)
|
||||
|
||||
def test_create_unix_server_existing_path_sock(self):
|
||||
with test_utils.unix_socket_path() as path:
|
||||
|
@ -304,10 +298,10 @@ class SelectorEventLoopUnixSocketTests(unittest.TestCase):
|
|||
self.loop.run_until_complete(coro)
|
||||
|
||||
|
||||
class UnixReadPipeTransportTests(unittest.TestCase):
|
||||
class UnixReadPipeTransportTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
self.loop = self.new_test_loop()
|
||||
self.protocol = test_utils.make_test_protocol(asyncio.Protocol)
|
||||
self.pipe = mock.Mock(spec_set=io.RawIOBase)
|
||||
self.pipe.fileno.return_value = 5
|
||||
|
@ -451,7 +445,7 @@ class UnixReadPipeTransportTests(unittest.TestCase):
|
|||
self.assertEqual(2, sys.getrefcount(self.protocol),
|
||||
pprint.pformat(gc.get_referrers(self.protocol)))
|
||||
self.assertIsNone(tr._loop)
|
||||
self.assertEqual(4, sys.getrefcount(self.loop),
|
||||
self.assertEqual(5, sys.getrefcount(self.loop),
|
||||
pprint.pformat(gc.get_referrers(self.loop)))
|
||||
|
||||
def test__call_connection_lost_with_err(self):
|
||||
|
@ -468,14 +462,14 @@ class UnixReadPipeTransportTests(unittest.TestCase):
|
|||
self.assertEqual(2, sys.getrefcount(self.protocol),
|
||||
pprint.pformat(gc.get_referrers(self.protocol)))
|
||||
self.assertIsNone(tr._loop)
|
||||
self.assertEqual(4, sys.getrefcount(self.loop),
|
||||
self.assertEqual(5, sys.getrefcount(self.loop),
|
||||
pprint.pformat(gc.get_referrers(self.loop)))
|
||||
|
||||
|
||||
class UnixWritePipeTransportTests(unittest.TestCase):
|
||||
class UnixWritePipeTransportTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
self.loop = self.new_test_loop()
|
||||
self.protocol = test_utils.make_test_protocol(asyncio.BaseProtocol)
|
||||
self.pipe = mock.Mock(spec_set=io.RawIOBase)
|
||||
self.pipe.fileno.return_value = 5
|
||||
|
@ -737,7 +731,7 @@ class UnixWritePipeTransportTests(unittest.TestCase):
|
|||
self.assertEqual(2, sys.getrefcount(self.protocol),
|
||||
pprint.pformat(gc.get_referrers(self.protocol)))
|
||||
self.assertIsNone(tr._loop)
|
||||
self.assertEqual(4, sys.getrefcount(self.loop),
|
||||
self.assertEqual(5, sys.getrefcount(self.loop),
|
||||
pprint.pformat(gc.get_referrers(self.loop)))
|
||||
|
||||
def test__call_connection_lost_with_err(self):
|
||||
|
@ -753,7 +747,7 @@ class UnixWritePipeTransportTests(unittest.TestCase):
|
|||
self.assertEqual(2, sys.getrefcount(self.protocol),
|
||||
pprint.pformat(gc.get_referrers(self.protocol)))
|
||||
self.assertIsNone(tr._loop)
|
||||
self.assertEqual(4, sys.getrefcount(self.loop),
|
||||
self.assertEqual(5, sys.getrefcount(self.loop),
|
||||
pprint.pformat(gc.get_referrers(self.loop)))
|
||||
|
||||
def test_close(self):
|
||||
|
@ -834,7 +828,7 @@ class ChildWatcherTestsMixin:
|
|||
ignore_warnings = mock.patch.object(log.logger, "warning")
|
||||
|
||||
def setUp(self):
|
||||
self.loop = test_utils.TestLoop()
|
||||
self.loop = self.new_test_loop()
|
||||
self.running = False
|
||||
self.zombies = {}
|
||||
|
||||
|
@ -1392,7 +1386,7 @@ class ChildWatcherTestsMixin:
|
|||
|
||||
# attach a new loop
|
||||
old_loop = self.loop
|
||||
self.loop = test_utils.TestLoop()
|
||||
self.loop = self.new_test_loop()
|
||||
patch = mock.patch.object
|
||||
|
||||
with patch(old_loop, "remove_signal_handler") as m_old_remove, \
|
||||
|
@ -1447,7 +1441,7 @@ class ChildWatcherTestsMixin:
|
|||
self.assertFalse(callback3.called)
|
||||
|
||||
# attach a new loop
|
||||
self.loop = test_utils.TestLoop()
|
||||
self.loop = self.new_test_loop()
|
||||
|
||||
with mock.patch.object(
|
||||
self.loop, "add_signal_handler") as m_add_signal_handler:
|
||||
|
@ -1505,12 +1499,12 @@ class ChildWatcherTestsMixin:
|
|||
self.assertFalse(self.watcher._zombies)
|
||||
|
||||
|
||||
class SafeChildWatcherTests (ChildWatcherTestsMixin, unittest.TestCase):
|
||||
class SafeChildWatcherTests (ChildWatcherTestsMixin, test_utils.TestCase):
|
||||
def create_watcher(self):
|
||||
return asyncio.SafeChildWatcher()
|
||||
|
||||
|
||||
class FastChildWatcherTests (ChildWatcherTestsMixin, unittest.TestCase):
|
||||
class FastChildWatcherTests (ChildWatcherTestsMixin, test_utils.TestCase):
|
||||
def create_watcher(self):
|
||||
return asyncio.FastChildWatcher()
|
||||
|
||||
|
|
|
@ -9,6 +9,7 @@ import _winapi
|
|||
|
||||
import asyncio
|
||||
from asyncio import _overlapped
|
||||
from asyncio import test_utils
|
||||
from asyncio import windows_events
|
||||
|
||||
|
||||
|
@ -26,15 +27,11 @@ class UpperProto(asyncio.Protocol):
|
|||
self.trans.close()
|
||||
|
||||
|
||||
class ProactorTests(unittest.TestCase):
|
||||
class ProactorTests(test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.loop = asyncio.ProactorEventLoop()
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
def tearDown(self):
|
||||
self.loop.close()
|
||||
self.loop = None
|
||||
self.set_event_loop(self.loop)
|
||||
|
||||
def test_close(self):
|
||||
a, b = self.loop._socketpair()
|
||||
|
|
|
@ -507,6 +507,11 @@ class TestBasic(unittest.TestCase):
for s in ('abcd', range(2000)):
self.assertEqual(list(reversed(deque(s))), list(reversed(s)))

def test_reversed_new(self):
klass = type(reversed(deque()))
for s in ('abcd', range(2000)):
self.assertEqual(list(klass(deque(s))), list(reversed(s)))

def test_gc_doesnt_blowup(self):
import gc
# This used to assert-fail in deque_traverse() under a debug
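The new test_reversed_new boils down to the following check, shown here as a minimal sketch:

from collections import deque

# The reversed-deque iterator type can be instantiated directly and walks
# the deque in the same order as reversed() does.
klass = type(reversed(deque()))
print(list(klass(deque('abcd'))))   # ['d', 'c', 'b', 'a']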
@ -1149,7 +1149,7 @@ order (MRO) for bases """
except (TypeError, UnicodeEncodeError):
pass
else:
raise TestFailed("[chr(128)] slots not caught")
self.fail("[chr(128)] slots not caught")

# Test leaks
class Counted(object):
|
|
@ -1528,9 +1528,7 @@ class TestStdLib(unittest.TestCase):
|
|||
helper = pydoc.Helper(output=output)
|
||||
helper(self.Color)
|
||||
result = output.getvalue().strip()
|
||||
if result != expected_text:
|
||||
print_diffs(expected_text, result)
|
||||
self.fail("outputs are not equal, see diff above")
|
||||
self.assertEqual(result, expected_text)
|
||||
|
||||
def test_inspect_getmembers(self):
|
||||
values = dict((
|
||||
|
|
|
@ -50,6 +50,45 @@ class FinalizationTest(unittest.TestCase):
self.assertEqual(gc.garbage, old_garbage)


class GeneratorTest(unittest.TestCase):

def test_name(self):
def func():
yield 1

# check generator names
gen = func()
self.assertEqual(gen.__name__, "func")
self.assertEqual(gen.__qualname__,
"GeneratorTest.test_name.<locals>.func")

# modify generator names
gen.__name__ = "name"
gen.__qualname__ = "qualname"
self.assertEqual(gen.__name__, "name")
self.assertEqual(gen.__qualname__, "qualname")

# generator names must be a string and cannot be deleted
self.assertRaises(TypeError, setattr, gen, '__name__', 123)
self.assertRaises(TypeError, setattr, gen, '__qualname__', 123)
self.assertRaises(TypeError, delattr, gen, '__name__')
self.assertRaises(TypeError, delattr, gen, '__qualname__')

# modify names of the function creating the generator
func.__qualname__ = "func_qualname"
func.__name__ = "func_name"
gen = func()
self.assertEqual(gen.__name__, "func_name")
self.assertEqual(gen.__qualname__, "func_qualname")

# unnamed generator
gen = (x for x in range(10))
self.assertEqual(gen.__name__,
"<genexpr>")
self.assertEqual(gen.__qualname__,
"GeneratorTest.test_name.<locals>.<genexpr>")
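A quick illustration of the generator attributes the new test covers:

def outer():
    def gen():
        yield 1
    return gen()

g = outer()
print(g.__name__)       # 'gen', taken from the function rather than the code object
print(g.__qualname__)   # 'outer.<locals>.gen'

g.__name__ = 'renamed'  # both attributes are writable, but only with str values
print(g.__name__)       # 'renamed'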
||||
tutorial_tests = """
|
||||
Let's try a simple generator:
|
||||
|
||||
|
|
|
@ -390,6 +390,31 @@ class GrammarTests(unittest.TestCase):
|
|||
check_syntax_error(self, "x + 1 = 1")
|
||||
check_syntax_error(self, "a + 1 = b + 2")
|
||||
|
||||
# Check the heuristic for print & exec covers significant cases
|
||||
# As well as placing some limits on false positives
|
||||
def test_former_statements_refer_to_builtins(self):
|
||||
keywords = "print", "exec"
|
||||
# Cases where we want the custom error
|
||||
cases = [
|
||||
"{} foo",
|
||||
"{} {{1:foo}}",
|
||||
"if 1: {} foo",
|
||||
"if 1: {} {{1:foo}}",
|
||||
"if 1:\n {} foo",
|
||||
"if 1:\n {} {{1:foo}}",
|
||||
]
|
||||
for keyword in keywords:
|
||||
custom_msg = "call to '{}'".format(keyword)
|
||||
for case in cases:
|
||||
source = case.format(keyword)
|
||||
with self.subTest(source=source):
|
||||
with self.assertRaisesRegex(SyntaxError, custom_msg):
|
||||
exec(source)
|
||||
source = source.replace("foo", "(foo.)")
|
||||
with self.subTest(source=source):
|
||||
with self.assertRaisesRegex(SyntaxError, "invalid syntax"):
|
||||
exec(source)
|
||||
|
||||
def test_del_stmt(self):
|
||||
# 'del' exprlist
|
||||
abc = [1,2,3]
|
||||
|
|
|
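The grammar test added above asserts the new error message for Python 2 style statements; reduced to one check, with foo as a placeholder name:

# Python 3 now points old-style print statements at the real problem.
try:
    exec("print foo")
except SyntaxError as exc:
    print(exc.msg)   # the message mentions a call to 'print', per the test regex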
@ -13,8 +13,8 @@ c_heapq = support.import_fresh_module('heapq', fresh=['_heapq'])
|
|||
|
||||
# _heapq.nlargest/nsmallest are saved in heapq._nlargest/_smallest when
|
||||
# _heapq is imported, so check them there
|
||||
func_names = ['heapify', 'heappop', 'heappush', 'heappushpop',
|
||||
'heapreplace', '_heapreplace_max']
|
||||
func_names = ['heapify', 'heappop', 'heappush', 'heappushpop', 'heapreplace',
|
||||
'_heappop_max', '_heapreplace_max', '_heapify_max']
|
||||
|
||||
class TestModules(TestCase):
|
||||
def test_py_functions(self):
|
||||
|
|
|
@ -485,6 +485,11 @@ class CGIHTTPServerTestCase(BaseTestCase):
|
|||
(res.read(), res.getheader('Content-type'), res.status))
|
||||
self.assertEqual(os.environ['SERVER_SOFTWARE'], signature)
|
||||
|
||||
def test_urlquote_decoding_in_cgi_check(self):
|
||||
res = self.request('/cgi-bin%2ffile1.py')
|
||||
self.assertEqual((b'Hello World' + self.linesep, 'text/html', 200),
|
||||
(res.read(), res.getheader('Content-type'), res.status))
|
||||
|
||||
|
||||
class SocketlessRequestHandler(SimpleHTTPRequestHandler):
|
||||
def __init__(self):
|
||||
|
|
|
@ -1531,6 +1531,13 @@ class MinidomTest(unittest.TestCase):
|
|||
num_children_after = len(doc.childNodes)
|
||||
self.assertTrue(num_children_after == num_children_before - 1)
|
||||
|
||||
def testProcessingInstructionNameError(self):
|
||||
# wrong variable in .nodeValue property will
|
||||
# lead to "NameError: name 'data' is not defined"
|
||||
doc = parse(tstfile)
|
||||
pi = doc.createProcessingInstruction("y", "z")
|
||||
pi.nodeValue = "crash"
|
||||
|
||||
def test_main():
|
||||
run_unittest(MinidomTest)
|
||||
|
||||
|
|
|
@ -530,6 +530,28 @@ class StatAttributeTests(unittest.TestCase):
|
|||
os.stat(r)
|
||||
self.assertEqual(ctx.exception.errno, errno.EBADF)
|
||||
|
||||
def check_file_attributes(self, result):
|
||||
self.assertTrue(hasattr(result, 'st_file_attributes'))
|
||||
self.assertTrue(isinstance(result.st_file_attributes, int))
|
||||
self.assertTrue(0 <= result.st_file_attributes <= 0xFFFFFFFF)
|
||||
|
||||
@unittest.skipUnless(sys.platform == "win32",
|
||||
"st_file_attributes is Win32 specific")
|
||||
def test_file_attributes(self):
|
||||
# test file st_file_attributes (FILE_ATTRIBUTE_DIRECTORY not set)
|
||||
result = os.stat(self.fname)
|
||||
self.check_file_attributes(result)
|
||||
self.assertEqual(
|
||||
result.st_file_attributes & stat.FILE_ATTRIBUTE_DIRECTORY,
|
||||
0)
|
||||
|
||||
# test directory st_file_attributes (FILE_ATTRIBUTE_DIRECTORY set)
|
||||
result = os.stat(support.TESTFN)
|
||||
self.check_file_attributes(result)
|
||||
self.assertEqual(
|
||||
result.st_file_attributes & stat.FILE_ATTRIBUTE_DIRECTORY,
|
||||
stat.FILE_ATTRIBUTE_DIRECTORY)
|
||||
|
||||
from test import mapping_tests
|
||||
|
||||
class EnvironTests(mapping_tests.BasicTestMappingProtocol):
|
||||
|
|
|
@ -402,6 +402,7 @@ class PydocDocTest(unittest.TestCase):
|
|||
"Docstrings are omitted with -O2 and above")
|
||||
@unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
|
||||
'trace function introduces __locals__ unexpectedly')
|
||||
@requires_docstrings
|
||||
def test_html_doc(self):
|
||||
result, doc_loc = get_pydoc_html(pydoc_mod)
|
||||
mod_file = inspect.getabsfile(pydoc_mod)
|
||||
|
@ -421,6 +422,7 @@ class PydocDocTest(unittest.TestCase):
|
|||
"Docstrings are omitted with -O2 and above")
|
||||
@unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
|
||||
'trace function introduces __locals__ unexpectedly')
|
||||
@requires_docstrings
|
||||
def test_text_doc(self):
|
||||
result, doc_loc = get_pydoc_text(pydoc_mod)
|
||||
expected_text = expected_text_pattern % (
|
||||
|
@ -495,6 +497,7 @@ class PydocDocTest(unittest.TestCase):
|
|||
'Docstrings are omitted with -O2 and above')
|
||||
@unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
|
||||
'trace function introduces __locals__ unexpectedly')
|
||||
@requires_docstrings
|
||||
def test_help_output_redirect(self):
|
||||
# issue 940286, if output is set in Helper, then all output from
|
||||
# Helper.help should be redirected
|
||||
|
@ -746,7 +749,7 @@ class TestDescriptions(unittest.TestCase):
|
|||
try:
|
||||
pydoc.render_doc(name)
|
||||
except ImportError:
|
||||
self.fail('finding the doc of {!r} failed'.format(o))
|
||||
self.fail('finding the doc of {!r} failed'.format(name))
|
||||
|
||||
for name in ('notbuiltins', 'strrr', 'strr.translate',
|
||||
'str.trrrranslate', 'builtins.strrr',
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import unittest
|
||||
import os
|
||||
import sys
|
||||
from test.support import TESTFN, import_fresh_module
|
||||
|
||||
c_stat = import_fresh_module('stat', fresh=['_stat'])
|
||||
|
@ -52,6 +53,26 @@ class TestFilemode:
|
|||
'S_IWOTH': 0o002,
|
||||
'S_IXOTH': 0o001}
|
||||
|
||||
# defined by the Windows API documentation
|
||||
file_attributes = {
|
||||
'FILE_ATTRIBUTE_ARCHIVE': 32,
|
||||
'FILE_ATTRIBUTE_COMPRESSED': 2048,
|
||||
'FILE_ATTRIBUTE_DEVICE': 64,
|
||||
'FILE_ATTRIBUTE_DIRECTORY': 16,
|
||||
'FILE_ATTRIBUTE_ENCRYPTED': 16384,
|
||||
'FILE_ATTRIBUTE_HIDDEN': 2,
|
||||
'FILE_ATTRIBUTE_INTEGRITY_STREAM': 32768,
|
||||
'FILE_ATTRIBUTE_NORMAL': 128,
|
||||
'FILE_ATTRIBUTE_NOT_CONTENT_INDEXED': 8192,
|
||||
'FILE_ATTRIBUTE_NO_SCRUB_DATA': 131072,
|
||||
'FILE_ATTRIBUTE_OFFLINE': 4096,
|
||||
'FILE_ATTRIBUTE_READONLY': 1,
|
||||
'FILE_ATTRIBUTE_REPARSE_POINT': 1024,
|
||||
'FILE_ATTRIBUTE_SPARSE_FILE': 512,
|
||||
'FILE_ATTRIBUTE_SYSTEM': 4,
|
||||
'FILE_ATTRIBUTE_TEMPORARY': 256,
|
||||
'FILE_ATTRIBUTE_VIRTUAL': 65536}
|
||||
|
||||
def setUp(self):
|
||||
try:
|
||||
os.remove(TESTFN)
|
||||
|
@ -185,6 +206,14 @@ class TestFilemode:
|
|||
self.assertTrue(callable(func))
|
||||
self.assertEqual(func(0), 0)
|
||||
|
||||
@unittest.skipUnless(sys.platform == "win32",
|
||||
"FILE_ATTRIBUTE_* constants are Win32 specific")
|
||||
def test_file_attribute_constants(self):
|
||||
for key, value in sorted(self.file_attributes.items()):
|
||||
self.assertTrue(hasattr(self.statmod, key), key)
|
||||
modvalue = getattr(self.statmod, key)
|
||||
self.assertEqual(value, modvalue, key)
|
||||
|
||||
|
||||
class TestFilemodeCStat(TestFilemode, unittest.TestCase):
|
||||
statmod = c_stat
|
||||
|
|
|
@@ -1934,6 +1934,20 @@ class POSIXProcessTestCase(BaseTestCase):
        """Confirm that issue21618 is fixed (may fail under valgrind)."""
        fd_status = support.findfile("fd_status.py", subdir="subprocessdata")

        # This launches the meat of the test in a child process to
        # avoid messing with the larger unittest processes maximum
        # number of file descriptors.
        # This process launches:
        # +--> Process that lowers its RLIMIT_NOFILE after setting up
        #      a bunch of high open fds above the new lower rlimit.
        #      Those are reported via stdout before launching a new
        #      process with close_fds=False to run the actual test:
        #      +--> The TEST: This one launches a fd_status.py
        #           subprocess with close_fds=True so we can find out if
        #           any of the fds above the lowered rlimit are still open.
        p = subprocess.Popen([sys.executable, '-c', textwrap.dedent(
            '''
            import os, resource, subprocess, sys, textwrap
            open_fds = set()
            # Add a bunch more fds to pass down.
            for _ in range(40):

@@ -1949,12 +1963,15 @@ class POSIXProcessTestCase(BaseTestCase):
            open_fds.remove(fd)

        for fd in open_fds:
            self.addCleanup(os.close, fd)
            #self.addCleanup(os.close, fd)
            os.set_inheritable(fd, True)

        max_fd_open = max(open_fds)

        import resource
        # Communicate the open_fds to the parent unittest.TestCase process.
        print(','.join(map(str, sorted(open_fds))))
        sys.stdout.flush()

        rlim_cur, rlim_max = resource.getrlimit(resource.RLIMIT_NOFILE)
        try:
            # 29 is lower than the highest fds we are leaving open.

@@ -1965,22 +1982,27 @@ class POSIXProcessTestCase(BaseTestCase):
            # An explicit list of fds to check is passed to fd_status.py as
            # letting fd_status rely on its default logic would miss the
            # fds above rlim_cur as it normally only checks up to that limit.
            p = subprocess.Popen(
            subprocess.Popen(
                [sys.executable, '-c',
                 textwrap.dedent("""
                     import subprocess, sys
                     subprocess.Popen([sys.executable, {fd_status!r}] +
                     subprocess.Popen([sys.executable, %r] +
                                      [str(x) for x in range({max_fd})],
                                      close_fds=True).wait()
                     """.format(fd_status=fd_status, max_fd=max_fd_open+1))],
                stdout=subprocess.PIPE, close_fds=False)
                 """.format(max_fd=max_fd_open+1))],
                close_fds=False).wait()
        finally:
            resource.setrlimit(resource.RLIMIT_NOFILE, (rlim_cur, rlim_max))
        ''' % fd_status)], stdout=subprocess.PIPE)

        output, unused_stderr = p.communicate()
        remaining_fds = set(map(int, output.strip().split(b',')))
        output_lines = output.splitlines()
        self.assertEqual(len(output_lines), 2,
                         msg="expected exactly two lines of output:\n%r" % output)
        opened_fds = set(map(int, output_lines[0].strip().split(b',')))
        remaining_fds = set(map(int, output_lines[1].strip().split(b',')))

        self.assertFalse(remaining_fds & open_fds,
        self.assertFalse(remaining_fds & opened_fds,
                         msg="Some fds were left open.")
@@ -885,7 +885,7 @@ class SizeofTest(unittest.TestCase):
        check(bar, size('PP'))
        # generator
        def get_gen(): yield 1
        check(get_gen(), size('Pb2P'))
        check(get_gen(), size('Pb2PPP'))
        # iterator
        check(iter('abc'), size('lP'))
        # callable-iterator
@@ -976,7 +976,7 @@ class ProcessingInstruction(Childless, Node):
    def _get_nodeValue(self):
        return self.data
    def _set_nodeValue(self, value):
        self.data = data
        self.data = value
    nodeValue = property(_get_nodeValue, _set_nodeValue)

    # nodeName is an alias for target
@@ -174,6 +174,7 @@ Keith Briggs
Tobias Brink
Richard Brodie
Michael Broghton
Ammar Brohi
Daniel Brotsky
Jean Brouwers
Gary S. Brown

@@ -308,6 +309,7 @@ Vincent Delft
Arnaud Delobelle
Konrad Delong
Erik Demaine
Martin Dengler
John Dennis
L. Peter Deutsch
Roger Dev

@@ -578,6 +580,7 @@ Alan Hourihane
Ken Howard
Brad Howes
Mike Hoy
Ben Hoyt
Chih-Hao Huang
Christian Hudon
Lawrence Hudson
Misc/NEWS
@@ -10,6 +10,17 @@ Release date: TBA
Core and Builtins
-----------------

- Issue #21205: Add a new ``__qualname__`` attribute to generators, the
  qualified name, and use it in the representation of a generator
  (``repr(gen)``). The default name of the generator (``__name__`` attribute)
  is now taken from the function instead of the code. Use ``gen.gi_code.co_name``
  to get the name of the code.

- Issue #21669: With the aid of heuristics in SyntaxError.__init__, the
  parser now attempts to generate more meaningful (or at least more search
  engine friendly) error messages when "exec" and "print" are used as
  statements.

- Issue #21642: In the conditional if-else expression, allow an integer written
  with no space between itself and the ``else`` keyword (e.g. ``True if 42else
  False``) to be valid syntax.
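The Issue #21205 entry above is easiest to see from Python code. A minimal sketch, assuming an interpreter built with this change (the address in the repr will of course differ):

    # Generators now carry __name__ and __qualname__ taken from the function,
    # and repr() uses the qualified name.
    class Widget:
        def items(self):
            yield 1

    gen = Widget().items()
    print(gen.__name__)         # 'items'  (from the function)
    print(gen.__qualname__)     # 'Widget.items'
    print(repr(gen))            # "<generator object Widget.items at 0x...>"
    print(gen.gi_code.co_name)  # 'items'  (still the code object's name)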
@@ -92,6 +103,17 @@ Core and Builtins
Library
-------

- Issue #21491: socketserver: Fix a race condition in child processes reaping.

- Issue #21719: Added the ``st_file_attributes`` field to os.stat_result on
  Windows.

- Issue #21722: The distutils "upload" command now exits with a non-zero
  return code when uploading fails. Patch by Martin Dengler.

- Issue #21723: asyncio.Queue: support any type of number (ex: float) for the
  maximum size. Patch written by Vajrasky Kok.

- Issue #21711: support for "site-python" directories has now been removed
  from the site module (it was deprecated in 3.4).
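The Issue #21719 entry corresponds to the Modules/posixmodule.c and Modules/_stat.c hunks later in this diff. A rough sketch of how the new field is meant to be consumed (Windows only; ``example.txt`` is just a placeholder path):

    import os
    import stat

    st = os.stat('example.txt')
    # st_file_attributes only exists on Windows builds carrying this change.
    if hasattr(st, 'st_file_attributes'):
        hidden = bool(st.st_file_attributes & stat.FILE_ATTRIBUTE_HIDDEN)
        print('hidden file?', hidden)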
@@ -106,6 +128,9 @@ Library
  run_forever() and run_until_complete() methods of asyncio.BaseEventLoop now
  raise an exception if the event loop was closed.

- Issue #21766: Prevent a security hole in CGIHTTPServer by URL unquoting paths
  before checking for a CGI script at that path.

- Issue #21310: Fixed possible resource leak in failed open().

- Issue #21256: Printout of keyword args should be in deterministic order in
@@ -485,6 +510,15 @@ Extension Modules
IDLE
----

- Issue #21686: add unittest for HyperParser. Original patch by Saimadhav
  Heblikar.

- Issue #12387: Add missing upper(lower)case versions of default Windows key
  bindings for Idle so Caps Lock does not disable them. Patch by Roger Serwy.

- Issue #21695: Closing a Find-in-files output window while the search is
  still in progress no longer closes Idle.

- Issue #18910: Add unittest for textView. Patch by Phil Webster.

- Issue #18292: Add unittest for AutoExpand. Patch by Saimadhav Heblikar.
@@ -9,7 +9,7 @@ annotated by François Pinard, and converted to C by Raymond Hettinger.
#include "Python.h"

static int
_siftdown(PyListObject *heap, Py_ssize_t startpos, Py_ssize_t pos)
siftdown(PyListObject *heap, Py_ssize_t startpos, Py_ssize_t pos)
{
    PyObject *newitem, *parent;
    Py_ssize_t parentpos, size;

@@ -48,7 +48,7 @@ _siftdown(PyListObject *heap, Py_ssize_t startpos, Py_ssize_t pos)
}

static int
_siftup(PyListObject *heap, Py_ssize_t pos)
siftup(PyListObject *heap, Py_ssize_t pos)
{
    Py_ssize_t startpos, endpos, childpos, rightpos, limit;
    PyObject *tmp1, *tmp2;

@@ -91,7 +91,7 @@ _siftup(PyListObject *heap, Py_ssize_t pos)
        pos = childpos;
    }
    /* Bubble it up to its final resting place (by sifting its parents down). */
    return _siftdown(heap, startpos, pos);
    return siftdown(heap, startpos, pos);
}

static PyObject *

@@ -110,17 +110,16 @@ heappush(PyObject *self, PyObject *args)
    if (PyList_Append(heap, item) == -1)
        return NULL;

    if (_siftdown((PyListObject *)heap, 0, PyList_GET_SIZE(heap)-1) == -1)
    if (siftdown((PyListObject *)heap, 0, PyList_GET_SIZE(heap)-1) == -1)
        return NULL;
    Py_INCREF(Py_None);
    return Py_None;
    Py_RETURN_NONE;
}

PyDoc_STRVAR(heappush_doc,
"heappush(heap, item) -> None. Push item onto heap, maintaining the heap invariant.");

static PyObject *
heappop(PyObject *self, PyObject *heap)
heappop_internal(PyObject *heap, int siftup_func(PyListObject *, Py_ssize_t))
{
    PyObject *lastelt, *returnitem;
    Py_ssize_t n;

@@ -130,7 +129,7 @@ heappop(PyObject *self, PyObject *heap)
        return NULL;
    }

    /* # raises appropriate IndexError if heap is empty */
    /* raises IndexError if the heap is empty */
    n = PyList_GET_SIZE(heap);
    if (n == 0) {
        PyErr_SetString(PyExc_IndexError, "index out of range");

@@ -149,18 +148,24 @@ heappop(PyObject *self, PyObject *heap)
        return lastelt;
    returnitem = PyList_GET_ITEM(heap, 0);
    PyList_SET_ITEM(heap, 0, lastelt);
    if (_siftup((PyListObject *)heap, 0) == -1) {
    if (siftup_func((PyListObject *)heap, 0) == -1) {
        Py_DECREF(returnitem);
        return NULL;
    }
    return returnitem;
}

static PyObject *
heappop(PyObject *self, PyObject *heap)
{
    return heappop_internal(heap, siftup);
}

PyDoc_STRVAR(heappop_doc,
"Pop the smallest item off the heap, maintaining the heap invariant.");

static PyObject *
heapreplace(PyObject *self, PyObject *args)
heapreplace_internal(PyObject *args, int siftup_func(PyListObject *, Py_ssize_t))
{
    PyObject *heap, *item, *returnitem;

@@ -180,13 +185,19 @@ heapreplace(PyObject *self, PyObject *args)
    returnitem = PyList_GET_ITEM(heap, 0);
    Py_INCREF(item);
    PyList_SET_ITEM(heap, 0, item);
    if (_siftup((PyListObject *)heap, 0) == -1) {
    if (siftup_func((PyListObject *)heap, 0) == -1) {
        Py_DECREF(returnitem);
        return NULL;
    }
    return returnitem;
}

static PyObject *
heapreplace(PyObject *self, PyObject *args)
{
    return heapreplace_internal(args, siftup);
}

PyDoc_STRVAR(heapreplace_doc,
"heapreplace(heap, item) -> value. Pop and return the current smallest value, and add the new item.\n\
\n\

@@ -227,7 +238,7 @@ heappushpop(PyObject *self, PyObject *args)
    returnitem = PyList_GET_ITEM(heap, 0);
    Py_INCREF(item);
    PyList_SET_ITEM(heap, 0, item);
    if (_siftup((PyListObject *)heap, 0) == -1) {
    if (siftup((PyListObject *)heap, 0) == -1) {
        Py_DECREF(returnitem);
        return NULL;
    }

@@ -240,7 +251,7 @@ from the heap. The combined action runs more efficiently than\n\
heappush() followed by a separate call to heappop().");

static PyObject *
heapify(PyObject *self, PyObject *heap)
heapify_internal(PyObject *heap, int siftup_func(PyListObject *, Py_ssize_t))
{
    Py_ssize_t i, n;

@@ -258,17 +269,22 @@ heapify(PyObject *self, PyObject *heap)
       and that's again n//2-1.
    */
    for (i=n/2-1 ; i>=0 ; i--)
        if(_siftup((PyListObject *)heap, i) == -1)
        if(siftup_func((PyListObject *)heap, i) == -1)
            return NULL;
    Py_INCREF(Py_None);
    return Py_None;
    Py_RETURN_NONE;
}

static PyObject *
heapify(PyObject *self, PyObject *heap)
{
    return heapify_internal(heap, siftup);
}

PyDoc_STRVAR(heapify_doc,
"Transform list into a heap, in-place, in O(len(heap)) time.");

static int
_siftdownmax(PyListObject *heap, Py_ssize_t startpos, Py_ssize_t pos)
siftdown_max(PyListObject *heap, Py_ssize_t startpos, Py_ssize_t pos)
{
    PyObject *newitem, *parent;
    Py_ssize_t parentpos, size;

@@ -307,7 +323,7 @@ _siftdownmax(PyListObject *heap, Py_ssize_t startpos, Py_ssize_t pos)
}

static int
_siftupmax(PyListObject *heap, Py_ssize_t pos)
siftup_max(PyListObject *heap, Py_ssize_t pos)
{
    Py_ssize_t startpos, endpos, childpos, rightpos, limit;
    PyObject *tmp1, *tmp2;

@@ -350,39 +366,33 @@ _siftupmax(PyListObject *heap, Py_ssize_t pos)
        pos = childpos;
    }
    /* Bubble it up to its final resting place (by sifting its parents down). */
    return _siftdownmax(heap, startpos, pos);
    return siftdown_max(heap, startpos, pos);
}

static PyObject *
_heapreplace_max(PyObject *self, PyObject *args)
heappop_max(PyObject *self, PyObject *heap)
{
    PyObject *heap, *item, *returnitem;
    return heappop_internal(heap, siftup_max);
}

    if (!PyArg_UnpackTuple(args, "_heapreplace_max", 2, 2, &heap, &item))
        return NULL;
PyDoc_STRVAR(heappop_max_doc, "Maxheap variant of heappop.");

    if (!PyList_Check(heap)) {
        PyErr_SetString(PyExc_TypeError, "heap argument must be a list");
        return NULL;
    }

    if (PyList_GET_SIZE(heap) < 1) {
        PyErr_SetString(PyExc_IndexError, "index out of range");
        return NULL;
    }

    returnitem = PyList_GET_ITEM(heap, 0);
    Py_INCREF(item);
    PyList_SET_ITEM(heap, 0, item);
    if (_siftupmax((PyListObject *)heap, 0) == -1) {
        Py_DECREF(returnitem);
        return NULL;
    }
    return returnitem;
static PyObject *
heapreplace_max(PyObject *self, PyObject *args)
{
    return heapreplace_internal(args, siftup_max);
}

PyDoc_STRVAR(heapreplace_max_doc, "Maxheap variant of heapreplace");

static PyObject *
heapify_max(PyObject *self, PyObject *heap)
{
    return heapify_internal(heap, siftup_max);
}

PyDoc_STRVAR(heapify_max_doc, "Maxheap variant of heapify.");

static PyMethodDef heapq_methods[] = {
    {"heappush", (PyCFunction)heappush,
        METH_VARARGS, heappush_doc},

@@ -394,8 +404,12 @@ static PyMethodDef heapq_methods[] = {
        METH_VARARGS, heapreplace_doc},
    {"heapify", (PyCFunction)heapify,
        METH_O, heapify_doc},
    {"_heapreplace_max",(PyCFunction)_heapreplace_max,
    {"_heappop_max", (PyCFunction)heappop_max,
        METH_O, heappop_max_doc},
    {"_heapreplace_max",(PyCFunction)heapreplace_max,
        METH_VARARGS, heapreplace_max_doc},
    {"_heapify_max", (PyCFunction)heapify_max,
        METH_O, heapify_max_doc},
    {NULL, NULL} /* sentinel */
};
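The private entry points registered in the method table above can be exercised directly from Python on an interpreter built with this change. A small sketch (these are internal helpers used by heapq, not public API, so the underscored names may change):

    import _heapq

    data = [3, 1, 4, 1, 5, 9, 2, 6]
    _heapq._heapify_max(data)                # reorder in place as a max-heap
    print(_heapq._heappop_max(data))         # 9, the current maximum
    print(_heapq._heapreplace_max(data, 7))  # pops the new maximum (6), pushes 7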
@@ -224,8 +224,8 @@ typedef struct {
    PyObject_HEAD
    PyObject *decoder;
    PyObject *errors;
    signed int pendingcr: 1;
    signed int translate: 1;
    unsigned int pendingcr: 1;
    unsigned int translate: 1;
    unsigned int seennl: 3;
} nldecoder_object;

@@ -546,7 +546,7 @@ incrementalnewlinedecoder_setstate(nldecoder_object *self, PyObject *state)
    if (!PyArg_Parse(state, "(OK)", &buffer, &flag))
        return NULL;

    self->pendingcr = (int) flag & 1;
    self->pendingcr = (int) (flag & 1);
    flag >>= 1;

    if (self->decoder != Py_None)
@@ -27,9 +27,21 @@ extern "C" {
#endif /* HAVE_SYS_STAT_H */

#ifdef MS_WINDOWS
#include <windows.h>
typedef unsigned short mode_t;

/* FILE_ATTRIBUTE_INTEGRITY_STREAM and FILE_ATTRIBUTE_NO_SCRUB_DATA
   are not present in VC2010, so define them manually */
#ifndef FILE_ATTRIBUTE_INTEGRITY_STREAM
# define FILE_ATTRIBUTE_INTEGRITY_STREAM 0x8000
#endif

#ifndef FILE_ATTRIBUTE_NO_SCRUB_DATA
# define FILE_ATTRIBUTE_NO_SCRUB_DATA 0x20000
#endif

#endif /* MS_WINDOWS */

/* From Python's stat.py */
#ifndef S_IMODE
# define S_IMODE 07777

@@ -473,6 +485,10 @@ ST_SIZE\n\
ST_ATIME\n\
ST_MTIME\n\
ST_CTIME\n\
\n"

"FILE_ATTRIBUTE_*: Windows file attribute constants\n\
   (only present on Windows)\n\
");

@@ -555,6 +571,26 @@ PyInit__stat(void)
    if (PyModule_AddIntConstant(m, "ST_MTIME", 8)) return NULL;
    if (PyModule_AddIntConstant(m, "ST_CTIME", 9)) return NULL;

#ifdef MS_WINDOWS
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_ARCHIVE)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_COMPRESSED)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_DEVICE)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_DIRECTORY)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_ENCRYPTED)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_HIDDEN)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_INTEGRITY_STREAM)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_NORMAL)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_NOT_CONTENT_INDEXED)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_NO_SCRUB_DATA)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_OFFLINE)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_READONLY)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_REPARSE_POINT)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_SPARSE_FILE)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_SYSTEM)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_TEMPORARY)) return NULL;
    if (PyModule_AddIntMacro(m, FILE_ATTRIBUTE_VIRTUAL)) return NULL;
#endif

    return m;
}
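Once PyInit__stat exports these macros they surface as module-level constants, which is what Lib/test/test_stat.py asserts against earlier in this diff. A quick, hedged check (the constants are only defined on Windows builds, hence the getattr guard):

    import stat

    # Values mirror the Windows API documentation, e.g. READONLY=1, HIDDEN=2.
    print(getattr(stat, 'FILE_ATTRIBUTE_READONLY', None))
    print(getattr(stat, 'FILE_ATTRIBUTE_HIDDEN', None))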
@@ -1417,6 +1417,7 @@ win32_wchdir(LPCWSTR path)
   Therefore, we implement our own stat, based on the Win32 API directly.
*/
#define HAVE_STAT_NSEC 1
#define HAVE_STRUCT_STAT_ST_FILE_ATTRIBUTES 1

struct win32_stat{
    unsigned long st_dev;

@@ -1433,6 +1434,7 @@ struct win32_stat{
    int st_mtime_nsec;
    time_t st_ctime;
    int st_ctime_nsec;
    unsigned long st_file_attributes;
};

static __int64 secs_between_epochs = 11644473600; /* Seconds between 1.1.1601 and 1.1.1970 */

@@ -1497,6 +1499,7 @@ attribute_data_to_stat(BY_HANDLE_FILE_INFORMATION *info, ULONG reparse_tag, stru
        /* now set the bits that make this a symlink */
        result->st_mode |= S_IFLNK;
    }
    result->st_file_attributes = info->dwFileAttributes;

    return 0;
}

@@ -1960,6 +1963,9 @@ static PyStructSequence_Field stat_result_fields[] = {
#endif
#ifdef HAVE_STRUCT_STAT_ST_BIRTHTIME
    {"st_birthtime", "time of creation"},
#endif
#ifdef HAVE_STRUCT_STAT_ST_FILE_ATTRIBUTES
    {"st_file_attributes", "Windows file attribute bits"},
#endif
    {0}
};

@@ -2000,6 +2006,12 @@ static PyStructSequence_Field stat_result_fields[] = {
#define ST_BIRTHTIME_IDX ST_GEN_IDX
#endif

#ifdef HAVE_STRUCT_STAT_ST_FILE_ATTRIBUTES
#define ST_FILE_ATTRIBUTES_IDX (ST_BIRTHTIME_IDX+1)
#else
#define ST_FILE_ATTRIBUTES_IDX ST_BIRTHTIME_IDX
#endif

static PyStructSequence_Desc stat_result_desc = {
    "stat_result", /* name */
    stat_result__doc__, /* doc */

@@ -2267,6 +2279,10 @@ _pystat_fromstructstat(STRUCT_STAT *st)
    PyStructSequence_SET_ITEM(v, ST_FLAGS_IDX,
                              PyLong_FromLong((long)st->st_flags));
#endif
#ifdef HAVE_STRUCT_STAT_ST_FILE_ATTRIBUTES
    PyStructSequence_SET_ITEM(v, ST_FILE_ATTRIBUTES_IDX,
                              PyLong_FromUnsignedLong(st->st_file_attributes));
#endif

    if (PyErr_Occurred()) {
        Py_DECREF(v);
@@ -1254,6 +1254,9 @@ SimpleExtendsException(PyExc_Exception, AttributeError,
 *    SyntaxError extends Exception
 */

/* Helper function to customise error message for some syntax errors */
static int _report_missing_parentheses(PySyntaxErrorObject *self);

static int
SyntaxError_init(PySyntaxErrorObject *self, PyObject *args, PyObject *kwds)
{

@@ -1298,6 +1301,13 @@ SyntaxError_init(PySyntaxErrorObject *self, PyObject *args, PyObject *kwds)
        Py_INCREF(self->text);

        Py_DECREF(info);

        /* Issue #21669: Custom error for 'print' & 'exec' as statements */
        if (self->text && PyUnicode_Check(self->text)) {
            if (_report_missing_parentheses(self) < 0) {
                return -1;
            }
        }
    }
    return 0;
}

@@ -2783,3 +2793,128 @@ _PyErr_TrySetFromCause(const char *format, ...)
    PyErr_Restore(new_exc, new_val, new_tb);
    return new_val;
}


/* To help with migration from Python 2, SyntaxError.__init__ applies some
 * heuristics to try to report a more meaningful exception when print and
 * exec are used like statements.
 *
 * The heuristics are currently expected to detect the following cases:
 *   - top level statement
 *   - statement in a nested suite
 *   - trailing section of a one line complex statement
 *
 * They're currently known not to trigger:
 *   - after a semi-colon
 *
 * The error message can be a bit odd in cases where the "arguments" are
 * completely illegal syntactically, but that isn't worth the hassle of
 * fixing.
 *
 * We also can't do anything about cases that are legal Python 3 syntax
 * but mean something entirely different from what they did in Python 2
 * (omitting the arguments entirely, printing items preceded by a unary plus
 * or minus, using the stream redirection syntax).
 */

static int
_check_for_legacy_statements(PySyntaxErrorObject *self, Py_ssize_t start)
{
    /* Return values:
     *   -1: an error occurred
     *    0: nothing happened
     *    1: the check triggered & the error message was changed
     */
    static PyObject *print_prefix = NULL;
    static PyObject *exec_prefix = NULL;
    Py_ssize_t text_len = PyUnicode_GET_LENGTH(self->text);
    int kind = PyUnicode_KIND(self->text);
    void *data = PyUnicode_DATA(self->text);

    /* Ignore leading whitespace */
    while (start < text_len) {
        Py_UCS4 ch = PyUnicode_READ(kind, data, start);
        if (!Py_UNICODE_ISSPACE(ch))
            break;
        start++;
    }
    /* Checking against an empty or whitespace-only part of the string */
    if (start == text_len) {
        return 0;
    }

    /* Check for legacy print statements */
    if (print_prefix == NULL) {
        print_prefix = PyUnicode_InternFromString("print ");
        if (print_prefix == NULL) {
            return -1;
        }
    }
    if (PyUnicode_Tailmatch(self->text, print_prefix,
                            start, text_len, -1)) {
        Py_CLEAR(self->msg);
        self->msg = PyUnicode_FromString(
            "Missing parentheses in call to 'print'");
        return 1;
    }

    /* Check for legacy exec statements */
    if (exec_prefix == NULL) {
        exec_prefix = PyUnicode_InternFromString("exec ");
        if (exec_prefix == NULL) {
            return -1;
        }
    }
    if (PyUnicode_Tailmatch(self->text, exec_prefix,
                            start, text_len, -1)) {
        Py_CLEAR(self->msg);
        self->msg = PyUnicode_FromString(
            "Missing parentheses in call to 'exec'");
        return 1;
    }
    /* Fall back to the default error message */
    return 0;
}

static int
_report_missing_parentheses(PySyntaxErrorObject *self)
{
    Py_UCS4 left_paren = 40;
    Py_ssize_t left_paren_index;
    Py_ssize_t text_len = PyUnicode_GET_LENGTH(self->text);
    int legacy_check_result = 0;

    /* Skip entirely if there is an opening parenthesis */
    left_paren_index = PyUnicode_FindChar(self->text, left_paren,
                                          0, text_len, 1);
    if (left_paren_index < -1) {
        return -1;
    }
    if (left_paren_index != -1) {
        /* Use default error message for any line with an opening paren */
        return 0;
    }
    /* Handle the simple statement case */
    legacy_check_result = _check_for_legacy_statements(self, 0);
    if (legacy_check_result < 0) {
        return -1;
    }
    if (legacy_check_result == 0) {
        /* Handle the one-line complex statement case */
        Py_UCS4 colon = 58;
        Py_ssize_t colon_index;
        colon_index = PyUnicode_FindChar(self->text, colon,
                                         0, text_len, 1);
        if (colon_index < -1) {
            return -1;
        }
        if (colon_index >= 0 && colon_index < text_len) {
            /* Check again, starting from just after the colon */
            if (_check_for_legacy_statements(self, colon_index+1) < 0) {
                return -1;
            }
        }
    }
    return 0;
}
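What the heuristics above produce, seen from the Python side. A short sketch assuming an interpreter that includes this change; the message text is the one set in _check_for_legacy_statements:

    try:
        compile("print 'hello'", "<demo>", "exec")
    except SyntaxError as exc:
        print(exc.msg)   # Missing parentheses in call to 'print'

    try:
        compile("exec 'pass'", "<demo>", "exec")
    except SyntaxError as exc:
        print(exc.msg)   # Missing parentheses in call to 'exec'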
@@ -12,6 +12,8 @@ gen_traverse(PyGenObject *gen, visitproc visit, void *arg)
{
    Py_VISIT((PyObject *)gen->gi_frame);
    Py_VISIT(gen->gi_code);
    Py_VISIT(gen->gi_name);
    Py_VISIT(gen->gi_qualname);
    return 0;
}

@@ -58,6 +60,8 @@ gen_dealloc(PyGenObject *gen)
    _PyObject_GC_UNTRACK(self);
    Py_CLEAR(gen->gi_frame);
    Py_CLEAR(gen->gi_code);
    Py_CLEAR(gen->gi_name);
    Py_CLEAR(gen->gi_qualname);
    PyObject_GC_Del(gen);
}

@@ -418,29 +422,69 @@ static PyObject *
gen_repr(PyGenObject *gen)
{
    return PyUnicode_FromFormat("<generator object %S at %p>",
                                ((PyCodeObject *)gen->gi_code)->co_name,
                                gen);
                                gen->gi_qualname, gen);
}


static PyObject *
gen_get_name(PyGenObject *gen)
gen_get_name(PyGenObject *op)
{
    PyObject *name = ((PyCodeObject *)gen->gi_code)->co_name;
    Py_INCREF(name);
    return name;
    Py_INCREF(op->gi_name);
    return op->gi_name;
}

static int
gen_set_name(PyGenObject *op, PyObject *value)
{
    PyObject *tmp;

PyDoc_STRVAR(gen__name__doc__,
"Return the name of the generator's associated code object.");
    /* Not legal to del gen.gi_name or to set it to anything
     * other than a string object. */
    if (value == NULL || !PyUnicode_Check(value)) {
        PyErr_SetString(PyExc_TypeError,
                        "__name__ must be set to a string object");
        return -1;
    }
    tmp = op->gi_name;
    Py_INCREF(value);
    op->gi_name = value;
    Py_DECREF(tmp);
    return 0;
}

static PyObject *
gen_get_qualname(PyGenObject *op)
{
    Py_INCREF(op->gi_qualname);
    return op->gi_qualname;
}

static int
gen_set_qualname(PyGenObject *op, PyObject *value)
{
    PyObject *tmp;

    /* Not legal to del gen.__qualname__ or to set it to anything
     * other than a string object. */
    if (value == NULL || !PyUnicode_Check(value)) {
        PyErr_SetString(PyExc_TypeError,
                        "__qualname__ must be set to a string object");
        return -1;
    }
    tmp = op->gi_qualname;
    Py_INCREF(value);
    op->gi_qualname = value;
    Py_DECREF(tmp);
    return 0;
}

static PyGetSetDef gen_getsetlist[] = {
    {"__name__", (getter)gen_get_name, NULL, gen__name__doc__},
    {NULL}
    {"__name__", (getter)gen_get_name, (setter)gen_set_name,
     PyDoc_STR("name of the generator")},
    {"__qualname__", (getter)gen_get_qualname, (setter)gen_set_qualname,
     PyDoc_STR("qualified name of the generator")},
    {NULL} /* Sentinel */
};


static PyMemberDef gen_memberlist[] = {
    {"gi_frame", T_OBJECT, offsetof(PyGenObject, gi_frame), READONLY},
    {"gi_running", T_BOOL, offsetof(PyGenObject, gi_running), READONLY},

@@ -510,7 +554,7 @@ PyTypeObject PyGen_Type = {
};

PyObject *
PyGen_New(PyFrameObject *f)
PyGen_NewWithQualName(PyFrameObject *f, PyObject *name, PyObject *qualname)
{
    PyGenObject *gen = PyObject_GC_New(PyGenObject, &PyGen_Type);
    if (gen == NULL) {

@@ -523,10 +567,26 @@ PyGen_New(PyFrameObject *f)
    gen->gi_code = (PyObject *)(f->f_code);
    gen->gi_running = 0;
    gen->gi_weakreflist = NULL;
    if (name != NULL)
        gen->gi_name = name;
    else
        gen->gi_name = ((PyCodeObject *)gen->gi_code)->co_name;
    Py_INCREF(gen->gi_name);
    if (qualname != NULL)
        gen->gi_qualname = qualname;
    else
        gen->gi_qualname = gen->gi_name;
    Py_INCREF(gen->gi_qualname);
    _PyObject_GC_TRACK(gen);
    return (PyObject *)gen;
}

PyObject *
PyGen_New(PyFrameObject *f)
{
    return PyGen_NewWithQualName(f, NULL, NULL);
}

int
PyGen_NeedsFinalizing(PyGenObject *gen)
{
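The new getset entries above also make both attributes writable from Python, with the TypeError guard implemented in gen_set_name and gen_set_qualname. A small sketch on an interpreter built with this change:

    def producer():
        yield 'item'

    gen = producer()
    gen.__name__ = 'renamed'
    gen.__qualname__ = 'demo.renamed'
    print(repr(gen))          # "<generator object demo.renamed at 0x...>"

    try:
        gen.__name__ = 123    # rejected by gen_set_name above
    except TypeError as exc:
        print(exc)            # __name__ must be set to a string object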
@@ -186,7 +186,7 @@ def main():

    ssl_dir = sys.argv[1]

    if not os.path.exists(ssl_dir) and os.path.isdir(ssl_dir):
    if not os.path.isdir(ssl_dir):
        print(ssl_dir, "is not an existing directory!")
        sys.exit(1)
@@ -1327,7 +1327,7 @@ builtin_len(PyObject *self, PyObject *v)
PyDoc_STRVAR(len_doc,
"len(object)\n\
\n\
Return the number of items of a sequence or mapping.");
Return the number of items of a sequence or collection.");


static PyObject *
@@ -1267,6 +1267,13 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag)
            /* Other threads may run now */

            take_gil(tstate);

            /* Check if we should make a quick exit. */
            if (_Py_Finalizing && _Py_Finalizing != tstate) {
                drop_gil(tstate);
                PyThread_exit_thread();
            }

            if (PyThreadState_Swap(tstate) != NULL)
                Py_FatalError("ceval: orphan tstate");
        }

@@ -3401,10 +3408,11 @@ too_many_positional(PyCodeObject *co, int given, int defcount, PyObject **fastlo
   PyEval_EvalFrame() and PyEval_EvalCodeEx() you will need to adjust
   the test in the if statements in Misc/gdbinit (pystack and pystackv). */

PyObject *
PyEval_EvalCodeEx(PyObject *_co, PyObject *globals, PyObject *locals,
static PyObject *
_PyEval_EvalCodeWithName(PyObject *_co, PyObject *globals, PyObject *locals,
           PyObject **args, int argcount, PyObject **kws, int kwcount,
           PyObject **defs, int defcount, PyObject *kwdefs, PyObject *closure)
           PyObject **defs, int defcount, PyObject *kwdefs, PyObject *closure,
           PyObject *name, PyObject *qualname)
{
    PyCodeObject* co = (PyCodeObject*)_co;
    PyFrameObject *f;

@@ -3596,7 +3604,7 @@ PyEval_EvalCodeEx(PyObject *_co, PyObject *globals, PyObject *locals,

        /* Create a new generator that owns the ready to run frame
         * and return that as the value. */
        return PyGen_New(f);
        return PyGen_NewWithQualName(f, name, qualname);
    }

    retval = PyEval_EvalFrameEx(f,0);

@@ -3615,6 +3623,16 @@ fail: /* Jump here from prelude on failure */
    return retval;
}

PyObject *
PyEval_EvalCodeEx(PyObject *_co, PyObject *globals, PyObject *locals,
           PyObject **args, int argcount, PyObject **kws, int kwcount,
           PyObject **defs, int defcount, PyObject *kwdefs, PyObject *closure)
{
    return _PyEval_EvalCodeWithName(_co, globals, locals,
                                    args, argcount, kws, kwcount,
                                    defs, defcount, kwdefs, closure,
                                    NULL, NULL);
}

static PyObject *
special_lookup(PyObject *o, _Py_Identifier *id)

@@ -4313,6 +4331,8 @@ fast_function(PyObject *func, PyObject ***pp_stack, int n, int na, int nk)
    PyObject *globals = PyFunction_GET_GLOBALS(func);
    PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
    PyObject *kwdefs = PyFunction_GET_KW_DEFAULTS(func);
    PyObject *name = ((PyFunctionObject *)func) -> func_name;
    PyObject *qualname = ((PyFunctionObject *)func) -> func_qualname;
    PyObject **d = NULL;
    int nd = 0;

@@ -4355,10 +4375,11 @@ fast_function(PyObject *func, PyObject ***pp_stack, int n, int na, int nk)
        d = &PyTuple_GET_ITEM(argdefs, 0);
        nd = Py_SIZE(argdefs);
    }
    return PyEval_EvalCodeEx((PyObject*)co, globals,
    return _PyEval_EvalCodeWithName((PyObject*)co, globals,
                             (PyObject *)NULL, (*pp_stack)-n, na,
                             (*pp_stack)-2*nk, nk, d, nd, kwdefs,
                             PyFunction_GET_CLOSURE(func));
                             PyFunction_GET_CLOSURE(func),
                             name, qualname);
}

static PyObject *