Merge 3.4.3 release engineering changes back into 3.4.

Larry Hastings 2015-02-26 05:58:48 -08:00
commit 8c3ec536e9
89 changed files with 1273 additions and 1168 deletions

View File

@ -1205,7 +1205,7 @@ Here is an example::
{
if (strcmp(name, "data") == 0)
{
return PyInt_FromLong(obj->data);
return PyLong_FromLong(obj->data);
}
PyErr_Format(PyExc_AttributeError,

View File

@ -1,8 +1,9 @@
# Import the email modules we'll need
from email.parser import Parser
# If the e-mail headers are in a file, uncomment this line:
#headers = Parser().parse(open(messagefile, 'r'))
# If the e-mail headers are in a file, uncomment these two lines:
# with open(messagefile) as fp:
# headers = Parser().parse(fp)
# Or for parsing headers in a string, use:
headers = Parser().parsestr('From: <user@example.com>\n'

View File

@ -20,9 +20,8 @@ msg.preamble = 'Our family reunion'
for file in pngfiles:
# Open the files in binary mode. Let the MIMEImage class automatically
# guess the specific image type.
fp = open(file, 'rb')
img = MIMEImage(fp.read())
fp.close()
with open(file, 'rb') as fp:
img = MIMEImage(fp.read())
msg.attach(img)
# Send the email via our own SMTP server.
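# A hedged sketch of the send step the comment above refers to; the
# server address ('localhost') is an assumption, not part of this change.
import smtplib
with smtplib.SMTP('localhost') as s:
    s.send_message(msg)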

View File

@ -12,7 +12,8 @@ from email.parser import BytesParser
from imaginary import magic_html_parser
# In a real program you'd get the filename from the arguments.
msg = BytesParser(policy=policy.default).parse(open('outgoing.msg', 'rb'))
with open('outgoing.msg', 'rb') as fp:
msg = BytesParser(policy=policy.default).parse(fp)
# Now the header items can be accessed as a dictionary, and any non-ASCII will
# be converted to unicode:

View File

@ -6,10 +6,9 @@ from email.mime.text import MIMEText
# Open a plain text file for reading. For this example, assume that
# the text file contains only ASCII characters.
fp = open(textfile, 'rb')
# Create a text/plain message
msg = MIMEText(fp.read())
fp.close()
with open(textfile) as fp:
# Create a text/plain message
msg = MIMEText(fp.read())
# me == the sender's email address
# you == the recipient's email address

View File

@ -212,6 +212,7 @@ Example of unhandled exception::
loop = asyncio.get_event_loop()
asyncio.async(bug())
loop.run_forever()
loop.close()
Output::
@ -258,6 +259,7 @@ coroutine in another coroutine and use classic try/except::
loop = asyncio.get_event_loop()
asyncio.async(handle_exception())
loop.run_forever()
loop.close()
Another option is to use the :meth:`BaseEventLoop.run_until_complete`
function::
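    # A hedged sketch of this option (not the doc's full example); bug() is
    # reused from the snippet above.
    loop = asyncio.get_event_loop()
    task = asyncio.async(bug())
    try:
        loop.run_until_complete(task)
    except Exception:
        pass  # run_until_complete() re-raises the task's exception here
    loop.close()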

View File

@ -22,6 +22,8 @@ It provides multiple facilities, amongst which:
Base class of event loops.
This class is :ref:`not thread safe <asyncio-multithreading>`.
Run an event loop
-----------------
@ -104,6 +106,9 @@ keywords to your callback, use :func:`functools.partial`. For example,
Like :meth:`call_soon`, but thread safe.
See the :ref:`concurrency and multithreading <asyncio-multithreading>`
section of the documentation.
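A rough sketch of scheduling work from another thread (the running *loop*
and the worker-thread setup are assumptions for illustration)::

    import threading

    def from_worker_thread(loop):
        # The thread-safe variant is the only safe way to touch the loop
        # from outside its own thread.
        loop.call_soon_threadsafe(print, 'hello from a worker thread')

    threading.Thread(target=from_worker_thread, args=(loop,)).start()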
.. _asyncio-delayed-calls:
@ -180,7 +185,7 @@ Coroutines
Creating connections
--------------------
.. method:: BaseEventLoop.create_connection(protocol_factory, host=None, port=None, \*, ssl=None, family=0, proto=0, flags=0, sock=None, local_addr=None, server_hostname=None)
.. coroutinemethod:: BaseEventLoop.create_connection(protocol_factory, host=None, port=None, \*, ssl=None, family=0, proto=0, flags=0, sock=None, local_addr=None, server_hostname=None)
Create a streaming transport connection to a given Internet *host* and
*port*: socket family :py:data:`~socket.AF_INET` or
@ -253,7 +258,7 @@ Creating connections
(:class:`StreamReader`, :class:`StreamWriter`) instead of a protocol.
.. method:: BaseEventLoop.create_datagram_endpoint(protocol_factory, local_addr=None, remote_addr=None, \*, family=0, proto=0, flags=0)
.. coroutinemethod:: BaseEventLoop.create_datagram_endpoint(protocol_factory, local_addr=None, remote_addr=None, \*, family=0, proto=0, flags=0)
Create datagram connection: socket family :py:data:`~socket.AF_INET` or
:py:data:`~socket.AF_INET6` depending on *host* (or *family* if specified),
@ -271,7 +276,7 @@ Creating connections
:ref:`UDP echo server protocol <asyncio-udp-echo-server-protocol>` examples.
.. method:: BaseEventLoop.create_unix_connection(protocol_factory, path, \*, ssl=None, sock=None, server_hostname=None)
.. coroutinemethod:: BaseEventLoop.create_unix_connection(protocol_factory, path, \*, ssl=None, sock=None, server_hostname=None)
Create UNIX connection: socket family :py:data:`~socket.AF_UNIX`, socket
type :py:data:`~socket.SOCK_STREAM`. The :py:data:`~socket.AF_UNIX` socket
@ -290,7 +295,7 @@ Creating connections
Creating listening connections
------------------------------
.. method:: BaseEventLoop.create_server(protocol_factory, host=None, port=None, \*, family=socket.AF_UNSPEC, flags=socket.AI_PASSIVE, sock=None, backlog=100, ssl=None, reuse_address=None)
.. coroutinemethod:: BaseEventLoop.create_server(protocol_factory, host=None, port=None, \*, family=socket.AF_UNSPEC, flags=socket.AI_PASSIVE, sock=None, backlog=100, ssl=None, reuse_address=None)
Create a TCP server (socket type :data:`~socket.SOCK_STREAM`) bound to
*host* and *port*.
@ -336,11 +341,13 @@ Creating listening connections
:class:`StreamWriter`) pair and calls back a function with this pair.
.. method:: BaseEventLoop.create_unix_server(protocol_factory, path=None, \*, sock=None, backlog=100, ssl=None)
.. coroutinemethod:: BaseEventLoop.create_unix_server(protocol_factory, path=None, \*, sock=None, backlog=100, ssl=None)
Similar to :meth:`BaseEventLoop.create_server`, but specific to the
socket family :py:data:`~socket.AF_UNIX`.
This method is a :ref:`coroutine <coroutine>`.
Availability: UNIX.
@ -384,7 +391,7 @@ the file descriptor of a socket.
Low-level socket operations
---------------------------
.. method:: BaseEventLoop.sock_recv(sock, nbytes)
.. coroutinemethod:: BaseEventLoop.sock_recv(sock, nbytes)
Receive data from the socket. The return value is a bytes object
representing the data received. The maximum amount of data to be received
@ -399,7 +406,7 @@ Low-level socket operations
The :meth:`socket.socket.recv` method.
.. method:: BaseEventLoop.sock_sendall(sock, data)
.. coroutinemethod:: BaseEventLoop.sock_sendall(sock, data)
Send data to the socket. The socket must be connected to a remote socket.
This method continues to send data from *data* until either all data has
@ -416,7 +423,7 @@ Low-level socket operations
The :meth:`socket.socket.sendall` method.
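As an illustration only, a sketch tying the ``sock_*`` coroutines together
(3.4 style; the non-blocking socket is required, the address and payload are
assumptions)::

    import asyncio, socket

    @asyncio.coroutine
    def roundtrip(loop, address):
        sock = socket.socket()
        sock.setblocking(False)      # the sock_* methods require this
        yield from loop.sock_connect(sock, address)
        yield from loop.sock_sendall(sock, b'ping')
        data = yield from loop.sock_recv(sock, 1024)
        sock.close()
        return data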
.. method:: BaseEventLoop.sock_connect(sock, address)
.. coroutinemethod:: BaseEventLoop.sock_connect(sock, address)
Connect to a remote socket at *address*.
@ -438,7 +445,7 @@ Low-level socket operations
method.
.. method:: BaseEventLoop.sock_accept(sock)
.. coroutinemethod:: BaseEventLoop.sock_accept(sock)
Accept a connection. The socket must be bound to an address and listening
for connections. The return value is a pair ``(conn, address)`` where *conn*
@ -459,12 +466,12 @@ Low-level socket operations
Resolve host name
-----------------
.. method:: BaseEventLoop.getaddrinfo(host, port, \*, family=0, type=0, proto=0, flags=0)
.. coroutinemethod:: BaseEventLoop.getaddrinfo(host, port, \*, family=0, type=0, proto=0, flags=0)
This method is a :ref:`coroutine <coroutine>`, similar to
:meth:`socket.getaddrinfo` function but non-blocking.
.. method:: BaseEventLoop.getnameinfo(sockaddr, flags=0)
.. coroutinemethod:: BaseEventLoop.getnameinfo(sockaddr, flags=0)
This method is a :ref:`coroutine <coroutine>`, similar to
:meth:`socket.getnameinfo` function but non-blocking.
@ -476,7 +483,7 @@ Connect pipes
On Windows with :class:`SelectorEventLoop`, these methods are not supported.
Use :class:`ProactorEventLoop` to support pipes on Windows.
.. method:: BaseEventLoop.connect_read_pipe(protocol_factory, pipe)
.. coroutinemethod:: BaseEventLoop.connect_read_pipe(protocol_factory, pipe)
Register the read pipe in the event loop.
@ -490,7 +497,7 @@ Use :class:`ProactorEventLoop` to support pipes on Windows.
This method is a :ref:`coroutine <coroutine>`.
.. method:: BaseEventLoop.connect_write_pipe(protocol_factory, pipe)
.. coroutinemethod:: BaseEventLoop.connect_write_pipe(protocol_factory, pipe)
Register the write pipe in the event loop.
@ -543,7 +550,7 @@ Call a function in an :class:`~concurrent.futures.Executor` (pool of threads or
pool of processes). By default, an event loop uses a thread pool executor
(:class:`~concurrent.futures.ThreadPoolExecutor`).
.. method:: BaseEventLoop.run_in_executor(executor, callback, \*args)
.. coroutinemethod:: BaseEventLoop.run_in_executor(executor, callback, \*args)
Arrange for a callback to be called in the specified executor.
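For instance, a minimal sketch that offloads blocking file I/O to the default
executor (the file name is chosen for illustration)::

    import asyncio

    def blocking_read(path):
        with open(path) as f:
            return f.read()

    loop = asyncio.get_event_loop()
    # None selects the default ThreadPoolExecutor.
    future = loop.run_in_executor(None, blocking_read, 'setup.py')
    print(loop.run_until_complete(future))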
@ -654,7 +661,7 @@ Server
The server is closed asynchronously; use the :meth:`wait_closed` coroutine
to wait until the server is closed.
.. method:: wait_closed()
.. coroutinemethod:: wait_closed()
Wait until the :meth:`close` method completes.

View File

@ -23,6 +23,8 @@ then call the transport's methods for various purposes.
subprocess pipes. The methods available on a transport depend on
the transport's kind.
The transport classes are :ref:`not thread safe <asyncio-multithreading>`.
BaseTransport
-------------

View File

@ -0,0 +1,169 @@
.. currentmodule:: asyncio
Queues
======
Queues:
* :class:`Queue`
* :class:`PriorityQueue`
* :class:`LifoQueue`
* :class:`JoinableQueue`
asyncio queue API was designed to be close to classes of the :mod:`queue`
module (:class:`~queue.Queue`, :class:`~queue.PriorityQueue`,
:class:`~queue.LifoQueue`), but it has no *timeout* parameter. The
:func:`asyncio.wait_for` function can be used to cancel a task after a timeout.
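For example, a get() with a timeout might look roughly like this (a sketch,
3.4 style)::

    import asyncio

    @asyncio.coroutine
    def get_with_timeout(queue):
        try:
            return (yield from asyncio.wait_for(queue.get(), timeout=1.0))
        except asyncio.TimeoutError:
            return None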
Queue
-----
.. class:: Queue(maxsize=0, \*, loop=None)
A queue, useful for coordinating producer and consumer coroutines.
If *maxsize* is less than or equal to zero, the queue size is infinite. If
it is an integer greater than ``0``, then ``yield from put()`` will block
when the queue reaches *maxsize*, until an item is removed by :meth:`get`.
Unlike the standard library :mod:`queue`, you can reliably know this Queue's
size with :meth:`qsize`, since your single-threaded asyncio application won't
be interrupted between calling :meth:`qsize` and doing an operation on the
Queue.
This class is :ref:`not thread safe <asyncio-multithreading>`.
.. versionchanged:: 3.4.3
New :meth:`join` and :meth:`task_done` methods.
.. method:: empty()
Return ``True`` if the queue is empty, ``False`` otherwise.
.. method:: full()
Return ``True`` if there are :attr:`maxsize` items in the queue.
.. note::
If the Queue was initialized with ``maxsize=0`` (the default), then
:meth:`full()` is never ``True``.
.. coroutinemethod:: get()
Remove and return an item from the queue. If the queue is empty, wait until
an item is available.
This method is a :ref:`coroutine <coroutine>`.
.. seealso::
The :meth:`empty` method.
.. method:: get_nowait()
Remove and return an item from the queue.
Return an item if one is immediately available, else raise
:exc:`QueueEmpty`.
.. coroutinemethod:: join()
Block until all items in the queue have been gotten and processed.
The count of unfinished tasks goes up whenever an item is added to the
queue. The count goes down whenever a consumer calls
:meth:`task_done` to indicate that the item was retrieved and all work on
it is complete. When the count of unfinished tasks drops to zero,
:meth:`join` unblocks.
This method is a :ref:`coroutine <coroutine>`.
.. versionadded:: 3.4.3
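A minimal sketch of the join()/task_done() pattern (the processing step is
elided)::

    import asyncio

    @asyncio.coroutine
    def worker(queue):
        while True:
            item = yield from queue.get()
            # ... process item ...
            queue.task_done()

    @asyncio.coroutine
    def main(loop):
        queue = asyncio.Queue()
        task = loop.create_task(worker(queue))
        for i in range(10):
            yield from queue.put(i)
        yield from queue.join()    # resumes once every item is marked done
        task.cancel()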
.. coroutinemethod:: put(item)
Put an item into the queue. If the queue is full, wait until a free slot
is available before adding the item.
This method is a :ref:`coroutine <coroutine>`.
.. seealso::
The :meth:`full` method.
.. method:: put_nowait(item)
Put an item into the queue without blocking.
If no free slot is immediately available, raise :exc:`QueueFull`.
.. method:: qsize()
Number of items in the queue.
.. method:: task_done()
Indicate that a formerly enqueued task is complete.
Used by queue consumers. For each :meth:`~Queue.get` used to fetch a task, a
subsequent call to :meth:`task_done` tells the queue that the processing
on the task is complete.
If a :meth:`join` is currently blocking, it will resume when all items
have been processed (meaning that a :meth:`task_done` call was received
for every item that had been :meth:`~Queue.put` into the queue).
Raises :exc:`ValueError` if called more times than there were items
placed in the queue.
.. versionadded:: 3.4.3
.. attribute:: maxsize
Number of items allowed in the queue.
PriorityQueue
-------------
.. class:: PriorityQueue
A subclass of :class:`Queue`; retrieves entries in priority order (lowest
first).
Entries are typically tuples of the form: (priority number, data).
LifoQueue
---------
.. class:: LifoQueue
A subclass of :class:`Queue` that retrieves most recently added entries
first.
JoinableQueue
^^^^^^^^^^^^^
.. class:: JoinableQueue
Deprecated alias for :class:`Queue`.
.. deprecated:: 3.4.3
Exceptions
^^^^^^^^^^
.. exception:: QueueEmpty
Exception raised when the :meth:`~Queue.get_nowait` method is called on a
:class:`Queue` object which is empty.
.. exception:: QueueFull
Exception raised when the :meth:`~Queue.put_nowait` method is called on a
:class:`Queue` object which is full.

View File

@ -9,7 +9,7 @@ Streams (high-level API)
Stream functions
================
.. function:: open_connection(host=None, port=None, \*, loop=None, limit=None, **kwds)
.. coroutinefunction:: open_connection(host=None, port=None, \*, loop=None, limit=None, \*\*kwds)
A wrapper for :meth:`~BaseEventLoop.create_connection()` returning a (reader,
writer) pair.
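A sketch of typical usage (the host and request bytes are illustrative)::

    import asyncio

    @asyncio.coroutine
    def head_request(host):
        reader, writer = yield from asyncio.open_connection(host, 80)
        writer.write(b'HEAD / HTTP/1.0\r\nHost: ' + host.encode() + b'\r\n\r\n')
        status = yield from reader.readline()
        writer.close()
        return status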
@ -32,7 +32,7 @@ Stream functions
This function is a :ref:`coroutine <coroutine>`.
.. function:: start_server(client_connected_cb, host=None, port=None, \*, loop=None, limit=None, **kwds)
.. coroutinefunction:: start_server(client_connected_cb, host=None, port=None, \*, loop=None, limit=None, \*\*kwds)
Start a socket server, with a callback for each client connected. The return
value is the same as :meth:`~BaseEventLoop.create_server()`.
@ -56,7 +56,7 @@ Stream functions
This function is a :ref:`coroutine <coroutine>`.
.. function:: open_unix_connection(path=None, \*, loop=None, limit=None, **kwds)
.. coroutinefunction:: open_unix_connection(path=None, \*, loop=None, limit=None, **kwds)
A wrapper for :meth:`~BaseEventLoop.create_unix_connection()` returning
a (reader, writer) pair.
@ -68,7 +68,7 @@ Stream functions
Availability: UNIX.
.. function:: start_unix_server(client_connected_cb, path=None, \*, loop=None, limit=None, **kwds)
.. coroutinefunction:: start_unix_server(client_connected_cb, path=None, \*, loop=None, limit=None, **kwds)
Start a UNIX Domain Socket server, with a callback for each client connected.
@ -85,6 +85,8 @@ StreamReader
.. class:: StreamReader(limit=None, loop=None)
This class is :ref:`not thread safe <asyncio-multithreading>`.
.. method:: exception()
Get the exception.
@ -106,7 +108,7 @@ StreamReader
Set the transport.
.. method:: read(n=-1)
.. coroutinemethod:: read(n=-1)
Read up to *n* bytes. If *n* is not provided, or set to ``-1``,
read until EOF and return all read bytes.
@ -116,7 +118,7 @@ StreamReader
This method is a :ref:`coroutine <coroutine>`.
.. method:: readline()
.. coroutinemethod:: readline()
Read one line, where "line" is a sequence of bytes ending with ``\n``.
@ -128,7 +130,7 @@ StreamReader
This method is a :ref:`coroutine <coroutine>`.
.. method:: readexactly(n)
.. coroutinemethod:: readexactly(n)
Read exactly *n* bytes. Raise an :exc:`IncompleteReadError` if the end of
the stream is reached before *n* can be read, the
@ -155,6 +157,8 @@ StreamWriter
wait for flow control. It also adds a transport attribute which references
the :class:`Transport` directly.
This class is :ref:`not thread safe <asyncio-multithreading>`.
.. attribute:: transport
Transport.
@ -168,7 +172,7 @@ StreamWriter
Close the transport: see :meth:`BaseTransport.close`.
.. method:: drain()
.. coroutinemethod:: drain()
Give the write buffer of the underlying transport a chance to be flushed.

View File

@ -27,7 +27,7 @@ Example to use it on Windows::
Create a subprocess: high-level API using Process
-------------------------------------------------
.. function:: create_subprocess_exec(\*args, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds)
.. coroutinefunction:: create_subprocess_exec(\*args, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds)
Create a subprocess.
@ -39,7 +39,7 @@ Create a subprocess: high-level API using Process
This function is a :ref:`coroutine <coroutine>`.
.. function:: create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds)
.. coroutinefunction:: create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds)
Run the shell command *cmd*.
@ -67,7 +67,7 @@ Create a subprocess: low-level API using subprocess.Popen
Run subprocesses asynchronously using the :mod:`subprocess` module.
.. method:: BaseEventLoop.subprocess_exec(protocol_factory, \*args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs)
.. coroutinemethod:: BaseEventLoop.subprocess_exec(protocol_factory, \*args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs)
Create a subprocess from one or more string arguments (character strings or
bytes strings encoded to the :ref:`filesystem encoding
@ -116,7 +116,7 @@ Run subprocesses asynchronously using the :mod:`subprocess` module.
See the constructor of the :class:`subprocess.Popen` class for parameters.
.. method:: BaseEventLoop.subprocess_shell(protocol_factory, cmd, \*, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs)
.. coroutinemethod:: BaseEventLoop.subprocess_shell(protocol_factory, cmd, \*, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs)
Create a subprocess from *cmd*, which is a character string or a bytes
string encoded to the :ref:`filesystem encoding <filesystem-encoding>`,
@ -193,7 +193,10 @@ Process
:meth:`~subprocess.Popen.wait` method of the :class:`~subprocess.Popen`
class is implemented as a busy loop.
.. method:: wait()
This class is :ref:`not thread safe <asyncio-multithreading>`. See also the
:ref:`Subprocess and threads <asyncio-subprocess-threads>` section.
.. coroutinemethod:: wait()
Wait for child process to terminate. Set and return :attr:`returncode`
attribute.
@ -207,7 +210,7 @@ Process
blocks waiting for the OS pipe buffer to accept more data. Use the
:meth:`communicate` method when using pipes to avoid that.
.. method:: communicate(input=None)
.. coroutinemethod:: communicate(input=None)
Interact with process: Send data to stdin. Read data from stdout and
stderr, until end-of-file is reached. Wait for process to terminate.
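A sketch of the recommended pattern (3.4 style; the command is illustrative)::

    import asyncio
    from asyncio import subprocess

    @asyncio.coroutine
    def run(cmd):
        proc = yield from asyncio.create_subprocess_shell(
            cmd, stdout=subprocess.PIPE)
        stdout, _ = yield from proc.communicate()
        return proc.returncode, stdout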
@ -310,6 +313,8 @@ are limits:
subprocesses from other threads. Call the :func:`get_child_watcher`
function in the main thread to instantiate the child watcher.
The :class:`asyncio.subprocess.Process` class is not thread safe.
.. seealso::
The :ref:`Concurrency and multithreading in asyncio

View File

@ -9,22 +9,16 @@ Locks:
* :class:`Lock`
* :class:`Event`
* :class:`Condition`
Semaphores:
* :class:`Semaphore`
* :class:`BoundedSemaphore`
Queues:
* :class:`Queue`
* :class:`PriorityQueue`
* :class:`LifoQueue`
* :class:`JoinableQueue`
asyncio locks and queues API were designed to be close to classes of the
:mod:`threading` module (:class:`~threading.Lock`, :class:`~threading.Event`,
asyncio lock API was designed to be close to classes of the :mod:`threading`
module (:class:`~threading.Lock`, :class:`~threading.Event`,
:class:`~threading.Condition`, :class:`~threading.Semaphore`,
:class:`~threading.BoundedSemaphore`) and the :mod:`queue` module
(:class:`~queue.Queue`, :class:`~queue.PriorityQueue`,
:class:`~queue.LifoQueue`), but they have no *timeout* parameter. The
:class:`~threading.BoundedSemaphore`), but it has no *timeout* parameter. The
:func:`asyncio.wait_for` function can be used to cancel a task after a timeout.
Locks
@ -60,6 +54,8 @@ Lock
Locks also support the context management protocol. ``(yield from lock)``
should be used as context manager expression.
This class is :ref:`not thread safe <asyncio-multithreading>`.
Usage::
lock = Lock()
@ -89,7 +85,7 @@ Lock
Return ``True`` if the lock is acquired.
.. method:: acquire()
.. coroutinemethod:: acquire()
Acquire a lock.
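A sketch of the context-manager form mentioned above::

    import asyncio

    lock = asyncio.Lock()

    @asyncio.coroutine
    def critical_section():
        with (yield from lock):    # acquires here, releases on exit
            ...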
@ -123,6 +119,8 @@ Event
method. The :meth:`wait` method blocks until the flag is true. The flag is
initially false.
This class is :ref:`not thread safe <asyncio-multithreading>`.
.. method:: clear()
Reset the internal flag to false. Subsequently, coroutines calling
@ -139,7 +137,7 @@ Event
true are awakened. Coroutine that call :meth:`wait` once the flag is true
will not block at all.
.. method:: wait()
.. coroutinemethod:: wait()
Block until the internal flag is true.
@ -166,7 +164,9 @@ Condition
object, and it is used as the underlying lock. Otherwise,
a new :class:`Lock` object is created and used as the underlying lock.
.. method:: acquire()
This class is :ref:`not thread safe <asyncio-multithreading>`.
.. coroutinemethod:: acquire()
Acquire the underlying lock.
@ -213,7 +213,7 @@ Condition
There is no return value.
.. method:: wait()
.. coroutinemethod:: wait()
Wait until notified.
@ -227,7 +227,7 @@ Condition
This method is a :ref:`coroutine <coroutine>`.
.. method:: wait_for(predicate)
.. coroutinemethod:: wait_for(predicate)
Wait until a predicate becomes true.
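A sketch of waiting on a predicate (the shared *items* list is an assumption
for illustration)::

    import asyncio

    cond = asyncio.Condition()
    items = []

    @asyncio.coroutine
    def consumer():
        with (yield from cond):
            yield from cond.wait_for(lambda: len(items) > 0)
            return items.pop()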
@ -258,7 +258,9 @@ Semaphore
defaults to ``1``. If the value given is less than ``0``, :exc:`ValueError`
is raised.
.. method:: acquire()
This class is :ref:`not thread safe <asyncio-multithreading>`.
.. coroutinemethod:: acquire()
Acquire a semaphore.
@ -273,7 +275,7 @@ Semaphore
Returns ``True`` if semaphore can not be acquired immediately.
.. method:: release()
.. coroutinemethod:: release()
Release a semaphore, incrementing the internal counter by one. When it
was zero on entry and another coroutine is waiting for it to become
@ -285,154 +287,8 @@ BoundedSemaphore
.. class:: BoundedSemaphore(value=1, \*, loop=None)
A bounded semaphore implementation. Inherits from :class:`Semaphore`.
This raises :exc:`ValueError` in :meth:`~Semaphore.release` if it would
increase the value above the initial value.
Queues
------
Queue
^^^^^
.. class:: Queue(maxsize=0, \*, loop=None)
A queue, useful for coordinating producer and consumer coroutines.
If *maxsize* is less than or equal to zero, the queue size is infinite. If
it is an integer greater than ``0``, then ``yield from put()`` will block
when the queue reaches *maxsize*, until an item is removed by :meth:`get`.
Unlike the standard library :mod:`queue`, you can reliably know this Queue's
size with :meth:`qsize`, since your single-threaded asyncio application won't
be interrupted between calling :meth:`qsize` and doing an operation on the
Queue.
.. method:: empty()
Return ``True`` if the queue is empty, ``False`` otherwise.
.. method:: full()
Return ``True`` if there are :attr:`maxsize` items in the queue.
.. note::
If the Queue was initialized with ``maxsize=0`` (the default), then
:meth:`full()` is never ``True``.
.. method:: get()
Remove and return an item from the queue. If queue is empty, wait until
an item is available.
This method is a :ref:`coroutine <coroutine>`.
.. seealso::
The :meth:`empty` method.
.. method:: get_nowait()
Remove and return an item from the queue.
Return an item if one is immediately available, else raise
:exc:`QueueEmpty`.
.. method:: put(item)
Put an item into the queue. If the queue is full, wait until a free slot
is available before adding item.
This method is a :ref:`coroutine <coroutine>`.
.. seealso::
The :meth:`full` method.
.. method:: put_nowait(item)
Put an item into the queue without blocking.
If no free slot is immediately available, raise :exc:`QueueFull`.
.. method:: qsize()
Number of items in the queue.
.. attribute:: maxsize
Number of items allowed in the queue.
PriorityQueue
^^^^^^^^^^^^^
.. class:: PriorityQueue
A subclass of :class:`Queue`; retrieves entries in priority order (lowest
first).
Entries are typically tuples of the form: (priority number, data).
LifoQueue
^^^^^^^^^
.. class:: LifoQueue
A subclass of :class:`Queue` that retrieves most recently added entries
first.
JoinableQueue
^^^^^^^^^^^^^
.. class:: JoinableQueue
A subclass of :class:`Queue` with :meth:`task_done` and :meth:`join`
methods.
.. method:: join()
Block until all items in the queue have been gotten and processed.
The count of unfinished tasks goes up whenever an item is added to the
queue. The count goes down whenever a consumer thread calls
:meth:`task_done` to indicate that the item was retrieved and all work on
it is complete. When the count of unfinished tasks drops to zero,
:meth:`join` unblocks.
This method is a :ref:`coroutine <coroutine>`.
.. method:: task_done()
Indicate that a formerly enqueued task is complete.
Used by queue consumers. For each :meth:`~Queue.get` used to fetch a task, a
subsequent call to :meth:`task_done` tells the queue that the processing
on the task is complete.
If a :meth:`join` is currently blocking, it will resume when all items
have been processed (meaning that a :meth:`task_done` call was received
for every item that had been :meth:`~Queue.put` into the queue).
Raises :exc:`ValueError` if called more times than there were items
placed in the queue.
Exceptions
^^^^^^^^^^
.. exception:: QueueEmpty
Exception raised when the :meth:`~Queue.get_nowait` method is called on a
:class:`Queue` object which is empty.
.. exception:: QueueFull
Exception raised when the :meth:`~Queue.put_nowait` method is called on a
:class:`Queue` object which is full.

View File

@ -209,6 +209,8 @@ Future
:func:`~concurrent.futures.as_completed` functions in the
:mod:`concurrent.futures` package.
This class is :ref:`not thread safe <asyncio-multithreading>`.
.. method:: cancel()
Cancel the future and schedule callbacks.
@ -375,6 +377,8 @@ Task
Don't directly create :class:`Task` instances: use the :func:`async`
function or the :meth:`BaseEventLoop.create_task` method.
This class is :ref:`not thread safe <asyncio-multithreading>`.
.. classmethod:: all_tasks(loop=None)
Return a set of all tasks for an event loop.
@ -545,7 +549,7 @@ Task functions
Return ``True`` if *func* is a decorated :ref:`coroutine function
<coroutine>`.
.. function:: sleep(delay, result=None, \*, loop=None)
.. coroutinefunction:: sleep(delay, result=None, \*, loop=None)
Create a :ref:`coroutine <coroutine>` that completes after a given
time (in seconds). If *result* is provided, it is produced to the caller
@ -554,6 +558,8 @@ Task functions
The resolution of the sleep depends on the :ref:`granularity of the event
loop <asyncio-delayed-calls>`.
This function is a :ref:`coroutine <coroutine>`.
.. function:: shield(arg, \*, loop=None)
Wait for a future, shielding it from cancellation.
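The cancellation-handling pattern looks roughly like this (a sketch;
``inner()`` is an assumed coroutine)::

    task = asyncio.async(inner())
    try:
        res = yield from asyncio.shield(task)
    except asyncio.CancelledError:
        res = None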
@ -581,7 +587,7 @@ Task functions
except CancelledError:
res = None
.. function:: wait(futures, \*, loop=None, timeout=None, return_when=ALL_COMPLETED)
.. coroutinefunction:: wait(futures, \*, loop=None, timeout=None, return_when=ALL_COMPLETED)
Wait for the Futures and coroutine objects given by the sequence *futures*
to complete. Coroutines will be wrapped in Tasks. Returns two sets of
@ -626,7 +632,7 @@ Task functions
when the timeout occurs are returned in the second set.
.. function:: wait_for(fut, timeout, \*, loop=None)
.. coroutinefunction:: wait_for(fut, timeout, \*, loop=None)
Wait for the single :class:`Future` or :ref:`coroutine object <coroutine>`
to complete with timeout. If *timeout* is ``None``, block until the future

View File

@ -46,6 +46,11 @@ Here is a more detailed list of the package contents:
you absolutely, positively have to use a library that makes blocking
I/O calls.
Asynchronous programming is more complex than classical "sequential"
programming: see the :ref:`Develop with asyncio <asyncio-dev>` page which lists
common traps and explains how to avoid them. :ref:`Enable the debug mode
<asyncio-debug-mode>` during development to detect common issues.
Table of contents:
.. toctree::
@ -58,6 +63,7 @@ Table of contents:
asyncio-stream.rst
asyncio-subprocess.rst
asyncio-sync.rst
asyncio-queue.rst
asyncio-dev.rst
.. seealso::

View File

@ -65,9 +65,6 @@ The :mod:`binascii` module defines the following functions:
data. More than one line may be passed at a time. If the optional argument
*header* is present and true, underscores will be decoded as spaces.
.. versionchanged:: 3.2
Accept only bytestring or bytearray objects as input.
.. function:: b2a_qp(data, quotetabs=False, istext=True, header=False)
@ -156,9 +153,6 @@ The :mod:`binascii` module defines the following functions:
of hexadecimal digits (which can be upper or lower case), otherwise a
:exc:`TypeError` is raised.
.. versionchanged:: 3.2
Accept only bytestring or bytearray objects as input.
.. exception:: Error

View File

@ -9,7 +9,7 @@
module: pickle
module: copy
The :mod:`copyreg` module offers a way to define fuctions used while pickling
The :mod:`copyreg` module offers a way to define functions used while pickling
specific objects. The :mod:`pickle` and :mod:`copy` modules use those functions
when pickling/copying those objects. The module provides configuration
information about object constructors which are not classes.

View File

@ -169,6 +169,12 @@ The following exceptions are raised as appropriate:
status code that we don't understand.
.. exception:: LineTooLong
A subclass of :exc:`HTTPException`. Raised if an excessively long line
is received in the HTTP protocol from the server.
The constants defined in this module are:
.. data:: HTTP_PORT

View File

@ -64,6 +64,18 @@ of which this module provides three different variants:
Contains the server instance.
.. attribute:: close_connection
Boolean that should be set before :meth:`handle_one_request` returns,
indicating if another request may be expected, or if the connection should
be shut down.
.. attribute:: requestline
Contains the string representation of the HTTP request line. The
terminating CRLF is stripped. This attribute should be set by
:meth:`handle_one_request`. If no valid request line was processed, it
should be set to the empty string.
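For illustration, a hypothetical handler that uses these attributes (the
class and response body are assumptions)::

    from http.server import BaseHTTPRequestHandler

    class OneShotHandler(BaseHTTPRequestHandler):
        def do_GET(self):
            self.send_response(200)
            self.send_header('Content-Type', 'text/plain')
            self.end_headers()
            self.wfile.write(self.requestline.encode('iso-8859-1') + b'\n')
            self.close_connection = True   # stop after this request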
.. attribute:: command

View File

@ -106,8 +106,8 @@ behavior of the module.
extension is already known, the new type will replace the old one. When the type
is already known the extension will be added to the list of known extensions.
When *strict* is ``True`` (the default), the mapping will added to the official MIME
types, otherwise to the non-standard ones.
When *strict* is ``True`` (the default), the mapping will be added to the
official MIME types, otherwise to the non-standard ones.
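For example (a sketch; the type and extension are made up)::

    import mimetypes

    # strict=False files the mapping in the non-standard table.
    mimetypes.add_type('text/x-example', '.xmpl', strict=False)
    print(mimetypes.guess_type('demo.xmpl', strict=False))
    # -> ('text/x-example', None)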
.. data:: inited

View File

@ -2159,7 +2159,8 @@ features:
contain :func:`os.access`, otherwise it will be empty.
To check whether you can use the *effective_ids* parameter for
:func:`os.access`, use the ``in`` operator on ``supports_dir_fd``, like so::
:func:`os.access`, use the ``in`` operator on ``supports_effective_ids``,
like so::
os.access in os.supports_effective_ids

View File

@ -281,7 +281,9 @@ The special characters are:
assertion`. ``(?<=abc)def`` will find a match in ``abcdef``, since the
lookbehind will back up 3 characters and check if the contained pattern matches.
The contained pattern must only match strings of some fixed length, meaning that
``abc`` or ``a|b`` are allowed, but ``a*`` and ``a{3,4}`` are not. Note that
``abc`` or ``a|b`` are allowed, but ``a*`` and ``a{3,4}`` are not. Group
references are not supported even if they match strings of some fixed length.
Note that
patterns which start with positive lookbehind assertions will not match at the
beginning of the string being searched; you will most likely want to use the
:func:`search` function rather than the :func:`match` function:
@ -301,7 +303,8 @@ The special characters are:
Matches if the current position in the string is not preceded by a match for
``...``. This is called a :dfn:`negative lookbehind assertion`. Similar to
positive lookbehind assertions, the contained pattern must only match strings of
some fixed length. Patterns which start with negative lookbehind assertions may
some fixed length and shouldn't contain group references.
Patterns which start with negative lookbehind assertions may
match at the beginning of the string being searched.
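A short sketch of the search-versus-match point::

    import re

    print(re.match(r'(?<=abc)def', 'abcdef'))   # None: match() anchors at index 0
    print(re.search(r'(?<=abc)def', 'abcdef').group(0))   # 'def'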
``(?(id/name)yes-pattern|no-pattern)``

View File

@ -1067,7 +1067,7 @@ Comparison of objects of the same type depends on the type:
* Numbers are compared arithmetically.
* The values :const:`float('NaN')` and :const:`Decimal('NaN')` are special.
The are identical to themselves, ``x is x`` but are not equal to themselves,
They are identical to themselves, ``x is x`` but are not equal to themselves,
``x != x``. Additionally, comparing any value to a not-a-number value
will return ``False``. For example, both ``3 < float('NaN')`` and
``float('NaN') < 3`` will return ``False``.

View File

@ -145,6 +145,25 @@ class PyDecoratorMethod(PyDecoratorMixin, PyClassmember):
return PyClassmember.run(self)
class PyCoroutineMixin(object):
def handle_signature(self, sig, signode):
ret = super(PyCoroutineMixin, self).handle_signature(sig, signode)
signode.insert(0, addnodes.desc_annotation('coroutine ', 'coroutine '))
return ret
class PyCoroutineFunction(PyCoroutineMixin, PyModulelevel):
def run(self):
self.name = 'py:function'
return PyModulelevel.run(self)
class PyCoroutineMethod(PyCoroutineMixin, PyClassmember):
def run(self):
self.name = 'py:method'
return PyClassmember.run(self)
# Support for documenting version of removal in deprecations
class DeprecatedRemoved(Directive):
@ -347,5 +366,7 @@ def setup(app):
app.add_description_unit('2to3fixer', '2to3fixer', '%s (2to3 fixer)')
app.add_directive_to_domain('py', 'decorator', PyDecoratorFunction)
app.add_directive_to_domain('py', 'decoratormethod', PyDecoratorMethod)
app.add_directive_to_domain('py', 'coroutinefunction', PyCoroutineFunction)
app.add_directive_to_domain('py', 'coroutinemethod', PyCoroutineMethod)
app.add_directive('miscnews', MiscNews)
return {'version': '1.0', 'parallel_read_safe': True}

View File

@ -1052,7 +1052,7 @@ PyAPI_FUNC(Py_ssize_t) PyUnicode_AsWideChar(
always ends with a nul character. If size is not NULL, write the number of
wide characters (excluding the null character) into *size.
Returns a buffer allocated by PyMem_Alloc() (use PyMem_Free() to free it)
Returns a buffer allocated by PyMem_Malloc() (use PyMem_Free() to free it)
on success. On error, returns NULL, *size is undefined and raises a
MemoryError. */

View File

@ -57,6 +57,8 @@ class BaseSubprocessTransport(transports.SubprocessTransport):
info.append('pid=%s' % self._pid)
if self._returncode is not None:
info.append('returncode=%s' % self._returncode)
else:
info.append('running')
stdin = self._pipes.get(0)
if stdin is not None:
@ -93,7 +95,12 @@ class BaseSubprocessTransport(transports.SubprocessTransport):
continue
proto.pipe.close()
if self._proc is not None and self._returncode is None:
if (self._proc is not None
# the child process finished?
and self._returncode is None
# the child process finished but the transport was not notified yet?
and self._proc.poll() is None
):
if self._loop.get_debug():
logger.warning('Close running child process: kill %r', self)

View File

@ -1,7 +1,7 @@
"""Queues"""
__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'JoinableQueue',
'QueueFull', 'QueueEmpty']
__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty',
'JoinableQueue']
import collections
import heapq
@ -49,6 +49,9 @@ class Queue:
self._getters = collections.deque()
# Pairs of (item, Future).
self._putters = collections.deque()
self._unfinished_tasks = 0
self._finished = locks.Event(loop=self._loop)
self._finished.set()
self._init(maxsize)
def _init(self, maxsize):
@ -59,6 +62,8 @@ class Queue:
def _put(self, item):
self._queue.append(item)
self._unfinished_tasks += 1
self._finished.clear()
def __repr__(self):
return '<{} at {:#x} {}>'.format(
@ -75,6 +80,8 @@ class Queue:
result += ' _getters[{}]'.format(len(self._getters))
if self._putters:
result += ' _putters[{}]'.format(len(self._putters))
if self._unfinished_tasks:
result += ' tasks={}'.format(self._unfinished_tasks)
return result
def _consume_done_getters(self):
@ -126,9 +133,6 @@ class Queue:
'queue non-empty, why are getters waiting?')
getter = self._getters.popleft()
# Use _put and _get instead of passing item straight to getter, in
# case a subclass has logic that must run (e.g. JoinableQueue).
self._put(item)
# getter cannot be cancelled, we just removed done getters
@ -154,9 +158,6 @@ class Queue:
'queue non-empty, why are getters waiting?')
getter = self._getters.popleft()
# Use _put and _get instead of passing item straight to getter, in
# case a subclass has logic that must run (e.g. JoinableQueue).
self._put(item)
# getter cannot be cancelled, we just removed done getters
@ -219,6 +220,38 @@ class Queue:
else:
raise QueueEmpty
def task_done(self):
"""Indicate that a formerly enqueued task is complete.
Used by queue consumers. For each get() used to fetch a task,
a subsequent call to task_done() tells the queue that the processing
on the task is complete.
If a join() is currently blocking, it will resume when all items have
been processed (meaning that a task_done() call was received for every
item that had been put() into the queue).
Raises ValueError if called more times than there were items placed in
the queue.
"""
if self._unfinished_tasks <= 0:
raise ValueError('task_done() called too many times')
self._unfinished_tasks -= 1
if self._unfinished_tasks == 0:
self._finished.set()
@coroutine
def join(self):
"""Block until all items in the queue have been gotten and processed.
The count of unfinished tasks goes up whenever an item is added to the
queue. The count goes down whenever a consumer calls task_done() to
indicate that the item was retrieved and all work on it is complete.
When the count of unfinished tasks drops to zero, join() unblocks.
"""
if self._unfinished_tasks > 0:
yield from self._finished.wait()
class PriorityQueue(Queue):
"""A subclass of Queue; retrieves entries in priority order (lowest first).
@ -249,54 +282,5 @@ class LifoQueue(Queue):
return self._queue.pop()
class JoinableQueue(Queue):
"""A subclass of Queue with task_done() and join() methods."""
def __init__(self, maxsize=0, *, loop=None):
super().__init__(maxsize=maxsize, loop=loop)
self._unfinished_tasks = 0
self._finished = locks.Event(loop=self._loop)
self._finished.set()
def _format(self):
result = Queue._format(self)
if self._unfinished_tasks:
result += ' tasks={}'.format(self._unfinished_tasks)
return result
def _put(self, item):
super()._put(item)
self._unfinished_tasks += 1
self._finished.clear()
def task_done(self):
"""Indicate that a formerly enqueued task is complete.
Used by queue consumers. For each get() used to fetch a task,
a subsequent call to task_done() tells the queue that the processing
on the task is complete.
If a join() is currently blocking, it will resume when all items have
been processed (meaning that a task_done() call was received for every
item that had been put() into the queue).
Raises ValueError if called more times than there were items placed in
the queue.
"""
if self._unfinished_tasks <= 0:
raise ValueError('task_done() called too many times')
self._unfinished_tasks -= 1
if self._unfinished_tasks == 0:
self._finished.set()
@coroutine
def join(self):
"""Block until all items in the queue have been gotten and processed.
The count of unfinished tasks goes up whenever an item is added to the
queue. The count goes down whenever a consumer thread calls task_done()
to indicate that the item was retrieved and all work on it is complete.
When the count of unfinished tasks drops to zero, join() unblocks.
"""
if self._unfinished_tasks > 0:
yield from self._finished.wait()
JoinableQueue = Queue
"""Deprecated alias for Queue."""

View File

@ -21,6 +21,7 @@ is read when the database is opened, and some updates rewrite the whole index)
"""
import ast as _ast
import io as _io
import os as _os
import collections
@ -85,7 +86,7 @@ class _Database(collections.MutableMapping):
with f:
for line in f:
line = line.rstrip()
key, pos_and_siz_pair = eval(line)
key, pos_and_siz_pair = _ast.literal_eval(line)
key = key.encode('Latin-1')
self._index[key] = pos_and_siz_pair

View File

@ -74,12 +74,14 @@ import socket
import collections
from urllib.parse import urlsplit
# HTTPMessage, parse_headers(), and the HTTP status code constants are
# intentionally omitted for simplicity
__all__ = ["HTTPResponse", "HTTPConnection",
"HTTPException", "NotConnected", "UnknownProtocol",
"UnknownTransferEncoding", "UnimplementedFileMode",
"IncompleteRead", "InvalidURL", "ImproperConnectionState",
"CannotSendRequest", "CannotSendHeader", "ResponseNotReady",
"BadStatusLine", "error", "responses"]
"BadStatusLine", "LineTooLong", "error", "responses"]
HTTP_PORT = 80
HTTPS_PORT = 443

View File

@ -82,7 +82,10 @@ XXX To do:
__version__ = "0.6"
__all__ = ["HTTPServer", "BaseHTTPRequestHandler"]
__all__ = [
"HTTPServer", "BaseHTTPRequestHandler",
"SimpleHTTPRequestHandler", "CGIHTTPRequestHandler",
]
import html
import http.client
@ -270,7 +273,7 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
"""
self.command = None # set in case of error on the first line
self.request_version = version = self.default_request_version
self.close_connection = 1
self.close_connection = True
requestline = str(self.raw_requestline, 'iso-8859-1')
requestline = requestline.rstrip('\r\n')
self.requestline = requestline
@ -296,14 +299,14 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
self.send_error(400, "Bad request version (%r)" % version)
return False
if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1":
self.close_connection = 0
self.close_connection = False
if version_number >= (2, 0):
self.send_error(505,
"Invalid HTTP Version (%s)" % base_version_number)
return False
elif len(words) == 2:
command, path = words
self.close_connection = 1
self.close_connection = True
if command != 'GET':
self.send_error(400,
"Bad HTTP/0.9 request type (%r)" % command)
@ -325,10 +328,10 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
conntype = self.headers.get('Connection', "")
if conntype.lower() == 'close':
self.close_connection = 1
self.close_connection = True
elif (conntype.lower() == 'keep-alive' and
self.protocol_version >= "HTTP/1.1"):
self.close_connection = 0
self.close_connection = False
# Examine the headers and look for an Expect directive
expect = self.headers.get('Expect', "")
if (expect.lower() == "100-continue" and
@ -373,7 +376,7 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
self.send_error(414)
return
if not self.raw_requestline:
self.close_connection = 1
self.close_connection = True
return
if not self.parse_request():
# An error code has been sent, just exit
@ -388,12 +391,12 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
except socket.timeout as e:
#a read or a write timed out. Discard this connection
self.log_error("Request timed out: %r", e)
self.close_connection = 1
self.close_connection = True
return
def handle(self):
"""Handle multiple requests if necessary."""
self.close_connection = 1
self.close_connection = True
self.handle_one_request()
while not self.close_connection:
@ -475,9 +478,9 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
if keyword.lower() == 'connection':
if value.lower() == 'close':
self.close_connection = 1
self.close_connection = True
elif value.lower() == 'keep-alive':
self.close_connection = 0
self.close_connection = False
def end_headers(self):
"""Send the blank line ending the MIME headers."""

View File

@ -73,6 +73,10 @@ class _Flavour(object):
# parts. This makes the result of parsing e.g.
# ("C:", "/", "a") reasonably intuitive.
for part in it:
if not part:
continue
if altsep:
part = part.replace(altsep, sep)
drv = self.splitroot(part)[0]
if drv:
break

View File

@ -270,7 +270,7 @@ def synopsis(filename, cache={}):
except:
return None
del sys.modules['__temp__']
result = (module.__doc__ or '').splitlines()[0]
result = module.__doc__.splitlines()[0] if module.__doc__ else None
# Cache the result.
cache[filename] = (mtime, result)
return result
@ -1407,9 +1407,6 @@ class _PlainTextDoc(TextDoc):
def pager(text):
"""The first time this is called, determine what kind of pager to use."""
global pager
# Escape non-encodable characters to avoid encoding errors later
encoding = sys.getfilesystemencoding()
text = text.encode(encoding, 'backslashreplace').decode(encoding)
pager = getpager()
pager(text)
@ -1452,10 +1449,12 @@ def plain(text):
def pipepager(text, cmd):
"""Page through text by feeding it to another program."""
pipe = os.popen(cmd, 'w')
import subprocess
proc = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE)
try:
pipe.write(text)
pipe.close()
with proc:
with io.TextIOWrapper(proc.stdin, errors='backslashreplace') as pipe:
pipe.write(text)
except OSError:
pass # Ignore broken pipes caused by quitting the pager program.
@ -1463,16 +1462,21 @@ def tempfilepager(text, cmd):
"""Page through text by invoking a program on a temporary file."""
import tempfile
filename = tempfile.mktemp()
with open(filename, 'w') as file:
with open(filename, 'w', errors='backslashreplace') as file:
file.write(text)
try:
os.system(cmd + ' "' + filename + '"')
finally:
os.unlink(filename)
def _escape_stdout(text):
# Escape non-encodable characters to avoid encoding errors later
encoding = getattr(sys.stdout, 'encoding', None) or 'utf-8'
return text.encode(encoding, 'backslashreplace').decode(encoding)
def ttypager(text):
"""Page through text on a text terminal."""
lines = plain(text).split('\n')
lines = plain(_escape_stdout(text)).split('\n')
try:
import tty
fd = sys.stdin.fileno()
@ -1516,7 +1520,7 @@ def ttypager(text):
def plainpager(text):
"""Simply print unformatted text. This is the ultimate fallback."""
sys.stdout.write(plain(text))
sys.stdout.write(plain(_escape_stdout(text)))
def describe(thing):
"""Produce a short description of the given thing."""
@ -2075,7 +2079,7 @@ class ModuleScanner:
if onerror:
onerror(modname)
continue
desc = (module.__doc__ or '').splitlines()[0]
desc = module.__doc__.splitlines()[0] if module.__doc__ else ''
path = getattr(module,'__file__',None)
name = modname + ' - ' + desc
if name.lower().find(key) >= 0:

View File

@ -69,6 +69,8 @@ class Pattern:
self.open = []
self.groups = 1
self.groupdict = {}
self.lookbehind = 0
def opengroup(self, name=None):
gid = self.groups
self.groups = gid + 1
@ -352,6 +354,11 @@ def _escape(source, escape, state):
if group < state.groups:
if not state.checkgroup(group):
raise error("cannot refer to open group")
if state.lookbehind:
import warnings
warnings.warn('group references in lookbehind '
'assertions are not supported',
RuntimeWarning)
return GROUPREF, group
raise ValueError
if len(escape) == 2:
@ -630,6 +637,11 @@ def _parse(source, state):
if gid is None:
msg = "unknown group name: {0!r}".format(name)
raise error(msg)
if state.lookbehind:
import warnings
warnings.warn('group references in lookbehind '
'assertions are not supported',
RuntimeWarning)
subpatternappend((GROUPREF, gid))
continue
else:
@ -658,7 +670,10 @@ def _parse(source, state):
raise error("syntax error")
dir = -1 # lookbehind
char = sourceget()
state.lookbehind += 1
p = _parse_sub(source, state)
if dir < 0:
state.lookbehind -= 1
if not sourcematch(")"):
raise error("unbalanced parenthesis")
if char == "=":
@ -689,6 +704,11 @@ def _parse(source, state):
condgroup = int(condname)
except ValueError:
raise error("bad character in group name")
if state.lookbehind:
import warnings
warnings.warn('group references in lookbehind '
'assertions are not supported',
RuntimeWarning)
else:
# flags
if not source.next in FLAGS:

View File

@ -170,14 +170,12 @@ else:
# * Prefer any AES-GCM over any AES-CBC for better performance and security
# * Then Use HIGH cipher suites as a fallback
# * Then Use 3DES as fallback which is secure but slow
# * Finally use RC4 as a fallback which is problematic but needed for
# compatibility some times.
# * Disable NULL authentication, NULL encryption, and MD5 MACs for security
# reasons
_DEFAULT_CIPHERS = (
'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:ECDH+RC4:'
'DH+RC4:RSA+RC4:!aNULL:!eNULL:!MD5'
'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
'!eNULL:!MD5'
)
# Restricted and more secure ciphers for the server side

View File

@ -2491,16 +2491,16 @@ def main():
_, ext = os.path.splitext(tar_name)
compressions = {
# gz
'gz': 'gz',
'tgz': 'gz',
'.gz': 'gz',
'.tgz': 'gz',
# xz
'xz': 'xz',
'txz': 'xz',
'.xz': 'xz',
'.txz': 'xz',
# bz2
'bz2': 'bz2',
'tbz': 'bz2',
'tbz2': 'bz2',
'tb2': 'bz2',
'.bz2': 'bz2',
'.tbz': 'bz2',
'.tbz2': 'bz2',
'.tb2': 'bz2',
}
tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
tar_files = args.create

View File

@ -2020,6 +2020,12 @@ SERIALIZER = 'xmlrpclib'
class _TestRemoteManager(BaseTestCase):
ALLOWED_TYPES = ('manager',)
values = ['hello world', None, True, 2.25,
'hall\xe5 v\xe4rlden',
'\u043f\u0440\u0438\u0432\u0456\u0442 \u0441\u0432\u0456\u0442',
b'hall\xe5 v\xe4rlden',
]
result = values[:]
@classmethod
def _putter(cls, address, authkey):
@ -2028,7 +2034,8 @@ class _TestRemoteManager(BaseTestCase):
)
manager.connect()
queue = manager.get_queue()
queue.put(('hello world', None, True, 2.25))
# Note that xmlrpclib will deserialize object as a list not a tuple
queue.put(tuple(cls.values))
def test_remote(self):
authkey = os.urandom(32)
@ -2048,8 +2055,7 @@ class _TestRemoteManager(BaseTestCase):
manager2.connect()
queue = manager2.get_queue()
# Note that xmlrpclib will deserialize object as a list not a tuple
self.assertEqual(queue.get(), ['hello world', None, True, 2.25])
self.assertEqual(queue.get(), self.result)
# Because we are using xmlrpclib for serialization instead of
# pickle this will cause a serialization error.
@ -3405,12 +3411,12 @@ class TestNoForkBomb(unittest.TestCase):
name = os.path.join(os.path.dirname(__file__), 'mp_fork_bomb.py')
if sm != 'fork':
rc, out, err = test.script_helper.assert_python_failure(name, sm)
self.assertEqual('', out.decode('ascii'))
self.assertIn('RuntimeError', err.decode('ascii'))
self.assertEqual(out, b'')
self.assertIn(b'RuntimeError', err)
else:
rc, out, err = test.script_helper.assert_python_ok(name, sm)
self.assertEqual('123', out.decode('ascii').rstrip())
self.assertEqual('', err.decode('ascii'))
self.assertEqual(out.rstrip(), b'123')
self.assertEqual(err, b'')
#
# Issue #17555: ForkAwareThreadLock

View File

@ -144,21 +144,22 @@ def create_dynamic_class(name, bases):
# the object returned by create_data().
DATA0 = (
b'(lp0\nL0L\naL1L\naF2.0\nac'
b'builtins\ncomplex\n'
b'p1\n(F3.0\nF0.0\ntp2\nRp'
b'3\naL1L\naL-1L\naL255L\naL-'
b'255L\naL-256L\naL65535L\na'
b'L-65535L\naL-65536L\naL2'
b'147483647L\naL-2147483'
b'647L\naL-2147483648L\na('
b'Vabc\np4\ng4\nccopyreg'
b'\n_reconstructor\np5\n('
b'c__main__\nC\np6\ncbu'
b'iltins\nobject\np7\nNt'
b'p8\nRp9\n(dp10\nVfoo\np1'
b'1\nL1L\nsVbar\np12\nL2L\nsb'
b'g9\ntp13\nag13\naL5L\na.'
b'(lp0\nL0L\naL1L\naF2.0\n'
b'ac__builtin__\ncomple'
b'x\np1\n(F3.0\nF0.0\ntp2\n'
b'Rp3\naL1L\naL-1L\naL255'
b'L\naL-255L\naL-256L\naL'
b'65535L\naL-65535L\naL-'
b'65536L\naL2147483647L'
b'\naL-2147483647L\naL-2'
b'147483648L\na(Vabc\np4'
b'\ng4\nccopy_reg\n_recon'
b'structor\np5\n(c__main'
b'__\nC\np6\nc__builtin__'
b'\nobject\np7\nNtp8\nRp9\n'
b'(dp10\nVfoo\np11\nL1L\ns'
b'Vbar\np12\nL2L\nsbg9\ntp'
b'13\nag13\naL5L\na.'
)
# Disassembly of DATA0
@ -172,88 +173,88 @@ DATA0_DIS = """\
14: a APPEND
15: F FLOAT 2.0
20: a APPEND
21: c GLOBAL 'builtins complex'
39: p PUT 1
42: ( MARK
43: F FLOAT 3.0
48: F FLOAT 0.0
53: t TUPLE (MARK at 42)
54: p PUT 2
57: R REDUCE
58: p PUT 3
61: a APPEND
62: L LONG 1
66: a APPEND
67: L LONG -1
72: a APPEND
73: L LONG 255
79: a APPEND
80: L LONG -255
87: a APPEND
88: L LONG -256
95: a APPEND
96: L LONG 65535
104: a APPEND
105: L LONG -65535
114: a APPEND
115: L LONG -65536
124: a APPEND
125: L LONG 2147483647
138: a APPEND
139: L LONG -2147483647
153: a APPEND
154: L LONG -2147483648
168: a APPEND
169: ( MARK
170: V UNICODE 'abc'
175: p PUT 4
178: g GET 4
181: c GLOBAL 'copyreg _reconstructor'
205: p PUT 5
208: ( MARK
209: c GLOBAL '__main__ C'
221: p PUT 6
224: c GLOBAL 'builtins object'
241: p PUT 7
244: N NONE
245: t TUPLE (MARK at 208)
246: p PUT 8
249: R REDUCE
250: p PUT 9
253: ( MARK
254: d DICT (MARK at 253)
255: p PUT 10
259: V UNICODE 'foo'
264: p PUT 11
268: L LONG 1
272: s SETITEM
273: V UNICODE 'bar'
278: p PUT 12
282: L LONG 2
286: s SETITEM
287: b BUILD
288: g GET 9
291: t TUPLE (MARK at 169)
292: p PUT 13
296: a APPEND
297: g GET 13
301: a APPEND
302: L LONG 5
306: a APPEND
307: . STOP
21: c GLOBAL '__builtin__ complex'
42: p PUT 1
45: ( MARK
46: F FLOAT 3.0
51: F FLOAT 0.0
56: t TUPLE (MARK at 45)
57: p PUT 2
60: R REDUCE
61: p PUT 3
64: a APPEND
65: L LONG 1
69: a APPEND
70: L LONG -1
75: a APPEND
76: L LONG 255
82: a APPEND
83: L LONG -255
90: a APPEND
91: L LONG -256
98: a APPEND
99: L LONG 65535
107: a APPEND
108: L LONG -65535
117: a APPEND
118: L LONG -65536
127: a APPEND
128: L LONG 2147483647
141: a APPEND
142: L LONG -2147483647
156: a APPEND
157: L LONG -2147483648
171: a APPEND
172: ( MARK
173: V UNICODE 'abc'
178: p PUT 4
181: g GET 4
184: c GLOBAL 'copy_reg _reconstructor'
209: p PUT 5
212: ( MARK
213: c GLOBAL '__main__ C'
225: p PUT 6
228: c GLOBAL '__builtin__ object'
248: p PUT 7
251: N NONE
252: t TUPLE (MARK at 212)
253: p PUT 8
256: R REDUCE
257: p PUT 9
260: ( MARK
261: d DICT (MARK at 260)
262: p PUT 10
266: V UNICODE 'foo'
271: p PUT 11
275: L LONG 1
279: s SETITEM
280: V UNICODE 'bar'
285: p PUT 12
289: L LONG 2
293: s SETITEM
294: b BUILD
295: g GET 9
298: t TUPLE (MARK at 172)
299: p PUT 13
303: a APPEND
304: g GET 13
308: a APPEND
309: L LONG 5
313: a APPEND
314: . STOP
highest protocol among opcodes = 0
"""
DATA1 = (
b']q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c'
b'builtins\ncomplex\nq\x01'
b']q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c__'
b'builtin__\ncomplex\nq\x01'
b'(G@\x08\x00\x00\x00\x00\x00\x00G\x00\x00\x00\x00\x00\x00\x00\x00t'
b'q\x02Rq\x03K\x01J\xff\xff\xff\xffK\xffJ\x01\xff\xff\xffJ'
b'\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff\xff'
b'\xff\x7fJ\x01\x00\x00\x80J\x00\x00\x00\x80(X\x03\x00\x00\x00ab'
b'cq\x04h\x04ccopyreg\n_reco'
b'cq\x04h\x04ccopy_reg\n_reco'
b'nstructor\nq\x05(c__main'
b'__\nC\nq\x06cbuiltins\n'
b'__\nC\nq\x06c__builtin__\n'
b'object\nq\x07Ntq\x08Rq\t}q\n('
b'X\x03\x00\x00\x00fooq\x0bK\x01X\x03\x00\x00\x00bar'
b'q\x0cK\x02ubh\ttq\rh\rK\x05e.'
@ -267,66 +268,66 @@ DATA1_DIS = """\
4: K BININT1 0
6: K BININT1 1
8: G BINFLOAT 2.0
17: c GLOBAL 'builtins complex'
35: q BINPUT 1
37: ( MARK
38: G BINFLOAT 3.0
47: G BINFLOAT 0.0
56: t TUPLE (MARK at 37)
57: q BINPUT 2
59: R REDUCE
60: q BINPUT 3
62: K BININT1 1
64: J BININT -1
69: K BININT1 255
71: J BININT -255
76: J BININT -256
81: M BININT2 65535
84: J BININT -65535
89: J BININT -65536
94: J BININT 2147483647
99: J BININT -2147483647
104: J BININT -2147483648
109: ( MARK
110: X BINUNICODE 'abc'
118: q BINPUT 4
120: h BINGET 4
122: c GLOBAL 'copyreg _reconstructor'
146: q BINPUT 5
148: ( MARK
149: c GLOBAL '__main__ C'
161: q BINPUT 6
163: c GLOBAL 'builtins object'
180: q BINPUT 7
182: N NONE
183: t TUPLE (MARK at 148)
184: q BINPUT 8
186: R REDUCE
187: q BINPUT 9
189: } EMPTY_DICT
190: q BINPUT 10
192: ( MARK
193: X BINUNICODE 'foo'
201: q BINPUT 11
203: K BININT1 1
205: X BINUNICODE 'bar'
213: q BINPUT 12
215: K BININT1 2
217: u SETITEMS (MARK at 192)
218: b BUILD
219: h BINGET 9
221: t TUPLE (MARK at 109)
222: q BINPUT 13
224: h BINGET 13
226: K BININT1 5
228: e APPENDS (MARK at 3)
229: . STOP
17: c GLOBAL '__builtin__ complex'
38: q BINPUT 1
40: ( MARK
41: G BINFLOAT 3.0
50: G BINFLOAT 0.0
59: t TUPLE (MARK at 40)
60: q BINPUT 2
62: R REDUCE
63: q BINPUT 3
65: K BININT1 1
67: J BININT -1
72: K BININT1 255
74: J BININT -255
79: J BININT -256
84: M BININT2 65535
87: J BININT -65535
92: J BININT -65536
97: J BININT 2147483647
102: J BININT -2147483647
107: J BININT -2147483648
112: ( MARK
113: X BINUNICODE 'abc'
121: q BINPUT 4
123: h BINGET 4
125: c GLOBAL 'copy_reg _reconstructor'
150: q BINPUT 5
152: ( MARK
153: c GLOBAL '__main__ C'
165: q BINPUT 6
167: c GLOBAL '__builtin__ object'
187: q BINPUT 7
189: N NONE
190: t TUPLE (MARK at 152)
191: q BINPUT 8
193: R REDUCE
194: q BINPUT 9
196: } EMPTY_DICT
197: q BINPUT 10
199: ( MARK
200: X BINUNICODE 'foo'
208: q BINPUT 11
210: K BININT1 1
212: X BINUNICODE 'bar'
220: q BINPUT 12
222: K BININT1 2
224: u SETITEMS (MARK at 199)
225: b BUILD
226: h BINGET 9
228: t TUPLE (MARK at 112)
229: q BINPUT 13
231: h BINGET 13
233: K BININT1 5
235: e APPENDS (MARK at 3)
236: . STOP
highest protocol among opcodes = 1
"""
DATA2 = (
b'\x80\x02]q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c'
b'builtins\ncomplex\n'
b'__builtin__\ncomplex\n'
b'q\x01G@\x08\x00\x00\x00\x00\x00\x00G\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x86q\x02Rq\x03K\x01J\xff\xff\xff\xffK\xffJ\x01\xff\xff\xff'
b'J\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff'
@ -346,52 +347,52 @@ DATA2_DIS = """\
6: K BININT1 0
8: K BININT1 1
10: G BINFLOAT 2.0
19: c GLOBAL 'builtins complex'
37: q BINPUT 1
39: G BINFLOAT 3.0
48: G BINFLOAT 0.0
57: \x86 TUPLE2
58: q BINPUT 2
60: R REDUCE
61: q BINPUT 3
63: K BININT1 1
65: J BININT -1
70: K BININT1 255
72: J BININT -255
77: J BININT -256
82: M BININT2 65535
85: J BININT -65535
90: J BININT -65536
95: J BININT 2147483647
100: J BININT -2147483647
105: J BININT -2147483648
110: ( MARK
111: X BINUNICODE 'abc'
119: q BINPUT 4
121: h BINGET 4
123: c GLOBAL '__main__ C'
135: q BINPUT 5
137: ) EMPTY_TUPLE
138: \x81 NEWOBJ
139: q BINPUT 6
141: } EMPTY_DICT
142: q BINPUT 7
144: ( MARK
145: X BINUNICODE 'foo'
153: q BINPUT 8
155: K BININT1 1
157: X BINUNICODE 'bar'
165: q BINPUT 9
167: K BININT1 2
169: u SETITEMS (MARK at 144)
170: b BUILD
171: h BINGET 6
173: t TUPLE (MARK at 110)
174: q BINPUT 10
176: h BINGET 10
178: K BININT1 5
180: e APPENDS (MARK at 5)
181: . STOP
19: c GLOBAL '__builtin__ complex'
40: q BINPUT 1
42: G BINFLOAT 3.0
51: G BINFLOAT 0.0
60: \x86 TUPLE2
61: q BINPUT 2
63: R REDUCE
64: q BINPUT 3
66: K BININT1 1
68: J BININT -1
73: K BININT1 255
75: J BININT -255
80: J BININT -256
85: M BININT2 65535
88: J BININT -65535
93: J BININT -65536
98: J BININT 2147483647
103: J BININT -2147483647
108: J BININT -2147483648
113: ( MARK
114: X BINUNICODE 'abc'
122: q BINPUT 4
124: h BINGET 4
126: c GLOBAL '__main__ C'
138: q BINPUT 5
140: ) EMPTY_TUPLE
141: \x81 NEWOBJ
142: q BINPUT 6
144: } EMPTY_DICT
145: q BINPUT 7
147: ( MARK
148: X BINUNICODE 'foo'
156: q BINPUT 8
158: K BININT1 1
160: X BINUNICODE 'bar'
168: q BINPUT 9
170: K BININT1 2
172: u SETITEMS (MARK at 147)
173: b BUILD
174: h BINGET 6
176: t TUPLE (MARK at 113)
177: q BINPUT 10
179: h BINGET 10
181: K BININT1 5
183: e APPENDS (MARK at 5)
184: . STOP
highest protocol among opcodes = 2
"""
@ -570,14 +571,14 @@ class AbstractPickleTests(unittest.TestCase):
xname = X.__name__.encode('ascii')
# Protocol 0 (text mode pickle):
"""
0: ( MARK
1: i INST '__main__ X' (MARK at 0)
15: p PUT 0
18: ( MARK
19: d DICT (MARK at 18)
20: p PUT 1
23: b BUILD
24: . STOP
0: ( MARK
1: i INST '__main__ X' (MARK at 0)
13: p PUT 0
16: ( MARK
17: d DICT (MARK at 16)
18: p PUT 1
21: b BUILD
22: . STOP
"""
pickle0 = (b"(i__main__\n"
b"X\n"
@ -587,15 +588,15 @@ class AbstractPickleTests(unittest.TestCase):
# Protocol 1 (binary mode pickle)
"""
0: ( MARK
1: c GLOBAL '__main__ X'
15: q BINPUT 0
17: o OBJ (MARK at 0)
18: q BINPUT 1
20: } EMPTY_DICT
21: q BINPUT 2
23: b BUILD
24: . STOP
0: ( MARK
1: c GLOBAL '__main__ X'
13: q BINPUT 0
15: o OBJ (MARK at 0)
16: q BINPUT 1
18: } EMPTY_DICT
19: q BINPUT 2
21: b BUILD
22: . STOP
"""
pickle1 = (b'(c__main__\n'
b'X\n'
@ -604,16 +605,16 @@ class AbstractPickleTests(unittest.TestCase):
# Protocol 2 (pickle2 = b'\x80\x02' + pickle1)
"""
0: \x80 PROTO 2
2: ( MARK
3: c GLOBAL '__main__ X'
17: q BINPUT 0
19: o OBJ (MARK at 2)
20: q BINPUT 1
22: } EMPTY_DICT
23: q BINPUT 2
25: b BUILD
26: . STOP
0: \x80 PROTO 2
2: ( MARK
3: c GLOBAL '__main__ X'
15: q BINPUT 0
17: o OBJ (MARK at 2)
18: q BINPUT 1
20: } EMPTY_DICT
21: q BINPUT 2
23: b BUILD
24: . STOP
"""
pickle2 = (b'\x80\x02(c__main__\n'
b'X\n'

View File

@ -9,7 +9,6 @@ import locale
import sys
import unittest
from platform import uname
from test.support import run_unittest
if uname().system == "Darwin":
maj, min, mic = [int(part) for part in uname().release.split(".")]
@ -24,45 +23,52 @@ candidate_locales = ['es_UY', 'fr_FR', 'fi_FI', 'es_CO', 'pt_PT', 'it_IT',
'da_DK', 'nn_NO', 'cs_CZ', 'de_LU', 'es_BO', 'sq_AL', 'sk_SK', 'fr_CH',
'de_DE', 'sr_YU', 'br_FR', 'nl_BE', 'sv_FI', 'pl_PL', 'fr_CA', 'fo_FO',
'bs_BA', 'fr_LU', 'kl_GL', 'fa_IR', 'de_BE', 'sv_SE', 'it_CH', 'uk_UA',
'eu_ES', 'vi_VN', 'af_ZA', 'nb_NO', 'en_DK', 'tg_TJ', 'en_US',
'eu_ES', 'vi_VN', 'af_ZA', 'nb_NO', 'en_DK', 'tg_TJ', 'ps_AF', 'en_US',
'es_ES.ISO8859-1', 'fr_FR.ISO8859-15', 'ru_RU.KOI8-R', 'ko_KR.eucKR']
# Issue #13441: Skip some locales (e.g. cs_CZ and hu_HU) on Solaris to
# work around an mbstowcs() bug. For example, on Solaris, the hu_HU locale uses
# the locale encoding ISO-8859-2, the thousands separator is b'\xA0' and it is
# decoded as U+30000020 (an invalid character) by mbstowcs().
if sys.platform == 'sunos5':
old_locale = locale.setlocale(locale.LC_ALL)
try:
locales = []
for loc in candidate_locales:
try:
locale.setlocale(locale.LC_ALL, loc)
except Error:
continue
encoding = locale.getpreferredencoding(False)
try:
localeconv()
except Exception as err:
print("WARNING: Skip locale %s (encoding %s): [%s] %s"
% (loc, encoding, type(err), err))
else:
locales.append(loc)
candidate_locales = locales
finally:
locale.setlocale(locale.LC_ALL, old_locale)
def setUpModule():
global candidate_locales
# Issue #13441: Skip some locales (e.g. cs_CZ and hu_HU) on Solaris to
# work around an mbstowcs() bug. For example, on Solaris, the hu_HU locale uses
# the locale encoding ISO-8859-2, the thousands separator is b'\xA0' and it is
# decoded as U+30000020 (an invalid character) by mbstowcs().
if sys.platform == 'sunos5':
old_locale = locale.setlocale(locale.LC_ALL)
try:
locales = []
for loc in candidate_locales:
try:
locale.setlocale(locale.LC_ALL, loc)
except Error:
continue
encoding = locale.getpreferredencoding(False)
try:
localeconv()
except Exception as err:
print("WARNING: Skip locale %s (encoding %s): [%s] %s"
% (loc, encoding, type(err), err))
else:
locales.append(loc)
candidate_locales = locales
finally:
locale.setlocale(locale.LC_ALL, old_locale)
# Workaround for MSVC6(debug) crash bug
if "MSC v.1200" in sys.version:
def accept(loc):
a = loc.split(".")
return not(len(a) == 2 and len(a[-1]) >= 9)
candidate_locales = [loc for loc in candidate_locales if accept(loc)]
# Workaround for MSVC6(debug) crash bug
if "MSC v.1200" in sys.version:
def accept(loc):
a = loc.split(".")
return not(len(a) == 2 and len(a[-1]) >= 9)
candidate_locales = [loc for loc in candidate_locales if accept(loc)]
# List known locale values to test against when available.
# Dict formatted as ``<locale> : (<decimal_point>, <thousands_sep>)``. If a
# value is not known, use '' .
known_numerics = {'fr_FR' : (',', ''), 'en_US':('.', ',')}
known_numerics = {
'en_US': ('.', ','),
'fr_FR' : (',', ' '),
'de_DE' : (',', '.'),
'ps_AF': ('\u066b', '\u066c'),
}
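These expected pairs can be spot-checked interactively; a minimal sketch, assuming the de_DE locale is installed on the host:

    import locale

    locale.setlocale(locale.LC_NUMERIC, 'de_DE')
    conv = locale.localeconv()
    print(conv['decimal_point'], conv['thousands_sep'])  # expected: ',' '.'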
class _LocaleTests(unittest.TestCase):
@ -91,10 +97,12 @@ class _LocaleTests(unittest.TestCase):
calc_value, known_value,
calc_type, data_type, set_locale,
used_locale))
return True
@unittest.skipUnless(nl_langinfo, "nl_langinfo is not available")
def test_lc_numeric_nl_langinfo(self):
# Test nl_langinfo against known values
tested = False
for loc in candidate_locales:
try:
setlocale(LC_NUMERIC, loc)
@ -103,10 +111,14 @@ class _LocaleTests(unittest.TestCase):
continue
for li, lc in ((RADIXCHAR, "decimal_point"),
(THOUSEP, "thousands_sep")):
self.numeric_tester('nl_langinfo', nl_langinfo(li), lc, loc)
if self.numeric_tester('nl_langinfo', nl_langinfo(li), lc, loc):
tested = True
if not tested:
self.skipTest('no suitable locales')
def test_lc_numeric_localeconv(self):
# Test localeconv against known values
tested = False
for loc in candidate_locales:
try:
setlocale(LC_NUMERIC, loc)
@ -116,11 +128,15 @@ class _LocaleTests(unittest.TestCase):
formatting = localeconv()
for lc in ("decimal_point",
"thousands_sep"):
self.numeric_tester('localeconv', formatting[lc], lc, loc)
if self.numeric_tester('localeconv', formatting[lc], lc, loc):
tested = True
if not tested:
self.skipTest('no suitable locales')
@unittest.skipUnless(nl_langinfo, "nl_langinfo is not available")
def test_lc_numeric_basic(self):
# Test nl_langinfo against localeconv
tested = False
for loc in candidate_locales:
try:
setlocale(LC_NUMERIC, loc)
@ -140,10 +156,14 @@ class _LocaleTests(unittest.TestCase):
"(set to %s, using %s)" % (
nl_radixchar, li_radixchar,
loc, set_locale))
tested = True
if not tested:
self.skipTest('no suitable locales')
def test_float_parsing(self):
# Bug #1391872: Test whether float parsing is okay on European
# locales.
tested = False
for loc in candidate_locales:
try:
setlocale(LC_NUMERIC, loc)
@ -162,9 +182,10 @@ class _LocaleTests(unittest.TestCase):
if localeconv()['decimal_point'] != '.':
self.assertRaises(ValueError, float,
localeconv()['decimal_point'].join(['1', '23']))
tested = True
if not tested:
self.skipTest('no suitable locales')
def test_main():
run_unittest(_LocaleTests)
if __name__ == '__main__':
test_main()
unittest.main()

View File

@ -408,14 +408,14 @@ class PriorityQueueTests(_QueueTestBase):
self.assertEqual([1, 2, 3], items)
class JoinableQueueTests(_QueueTestBase):
class QueueJoinTests(_QueueTestBase):
def test_task_done_underflow(self):
q = asyncio.JoinableQueue(loop=self.loop)
q = asyncio.Queue(loop=self.loop)
self.assertRaises(ValueError, q.task_done)
def test_task_done(self):
q = asyncio.JoinableQueue(loop=self.loop)
q = asyncio.Queue(loop=self.loop)
for i in range(100):
q.put_nowait(i)
@ -452,7 +452,7 @@ class JoinableQueueTests(_QueueTestBase):
self.loop.run_until_complete(asyncio.wait(tasks, loop=self.loop))
def test_join_empty_queue(self):
q = asyncio.JoinableQueue(loop=self.loop)
q = asyncio.Queue(loop=self.loop)
# Test that a queue join()s successfully, and before anything else
# (done twice for insurance).
@ -465,7 +465,7 @@ class JoinableQueueTests(_QueueTestBase):
self.loop.run_until_complete(join())
def test_format(self):
q = asyncio.JoinableQueue(loop=self.loop)
q = asyncio.Queue(loop=self.loop)
self.assertEqual(q._format(), 'maxsize=0')
q._unfinished_tasks = 2
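For reference, the join()/task_done() protocol these tests now exercise on the plain Queue looks like this in use; a minimal sketch in Python 3.4's yield-from coroutine style:

    import asyncio

    @asyncio.coroutine
    def drain(q):
        while True:
            item = yield from q.get()
            # ... process item ...
            q.task_done()  # one task_done() per retrieved item

    loop = asyncio.new_event_loop()
    q = asyncio.Queue(loop=loop)
    for i in range(3):
        q.put_nowait(i)
    worker = asyncio.async(drain(q), loop=loop)
    loop.run_until_complete(q.join())  # returns once every item is task_done()
    worker.cancel()
    loop.close()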

View File

@ -349,6 +349,70 @@ class SubprocessMixin:
self.loop.run_until_complete(cancel_make_transport())
test_utils.run_briefly(self.loop)
def test_close_kill_running(self):
@asyncio.coroutine
def kill_running():
create = self.loop.subprocess_exec(asyncio.SubprocessProtocol,
*PROGRAM_BLOCKED)
transport, protocol = yield from create
kill_called = False
def kill():
nonlocal kill_called
kill_called = True
orig_kill()
proc = transport.get_extra_info('subprocess')
orig_kill = proc.kill
proc.kill = kill
returncode = transport.get_returncode()
transport.close()
yield from transport._wait()
return (returncode, kill_called)
# Ignore "Close running child process: kill ..." log
with test_utils.disable_logger():
returncode, killed = self.loop.run_until_complete(kill_running())
self.assertIsNone(returncode)
# transport.close() must kill the process if it is still running
self.assertTrue(killed)
test_utils.run_briefly(self.loop)
def test_close_dont_kill_finished(self):
@asyncio.coroutine
def kill_running():
create = self.loop.subprocess_exec(asyncio.SubprocessProtocol,
*PROGRAM_BLOCKED)
transport, protocol = yield from create
proc = transport.get_extra_info('subprocess')
# kill the process (but asyncio is not notified immediately)
proc.kill()
proc.wait()
proc.kill = mock.Mock()
proc_returncode = proc.poll()
transport_returncode = transport.get_returncode()
transport.close()
return (proc_returncode, transport_returncode, proc.kill.called)
# Ignore "Unknown child process pid ..." log of SafeChildWatcher,
# emitted because the test already consumes the exit status:
# proc.wait()
with test_utils.disable_logger():
result = self.loop.run_until_complete(kill_running())
test_utils.run_briefly(self.loop)
proc_returncode, transport_return_code, killed = result
self.assertIsNotNone(proc_returncode)
self.assertIsNone(transport_return_code)
# transport.close() must not kill the process if it finished, even if
# the transport was not notified yet
self.assertFalse(killed)
if sys.platform != 'win32':
# Unix

View File

@ -295,7 +295,7 @@ class SelectorEventLoopUnixSocketTests(test_utils.TestCase):
def test_create_unix_connection_path_sock(self):
coro = self.loop.create_unix_connection(
lambda: None, '/dev/null', sock=object())
lambda: None, os.devnull, sock=object())
with self.assertRaisesRegex(ValueError, 'path and sock can not be'):
self.loop.run_until_complete(coro)
@ -308,14 +308,14 @@ class SelectorEventLoopUnixSocketTests(test_utils.TestCase):
def test_create_unix_connection_nossl_serverhost(self):
coro = self.loop.create_unix_connection(
lambda: None, '/dev/null', server_hostname='spam')
lambda: None, os.devnull, server_hostname='spam')
with self.assertRaisesRegex(ValueError,
'server_hostname is only meaningful'):
self.loop.run_until_complete(coro)
def test_create_unix_connection_ssl_noserverhost(self):
coro = self.loop.create_unix_connection(
lambda: None, '/dev/null', ssl=True)
lambda: None, os.devnull, ssl=True)
with self.assertRaisesRegex(
ValueError, 'you have to pass server_hostname when using ssl'):

View File

@ -87,11 +87,11 @@ class BZ2FileTest(BaseTest):
def testBadArgs(self):
self.assertRaises(TypeError, BZ2File, 123.456)
self.assertRaises(ValueError, BZ2File, "/dev/null", "z")
self.assertRaises(ValueError, BZ2File, "/dev/null", "rx")
self.assertRaises(ValueError, BZ2File, "/dev/null", "rbt")
self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=0)
self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=10)
self.assertRaises(ValueError, BZ2File, os.devnull, "z")
self.assertRaises(ValueError, BZ2File, os.devnull, "rx")
self.assertRaises(ValueError, BZ2File, os.devnull, "rbt")
self.assertRaises(ValueError, BZ2File, os.devnull, compresslevel=0)
self.assertRaises(ValueError, BZ2File, os.devnull, compresslevel=10)
def testRead(self):
self.createTempFile()

View File

@ -186,9 +186,9 @@ class CgiTests(unittest.TestCase):
cgi.initlog("%s", "Testing initlog 1")
cgi.log("%s", "Testing log 2")
self.assertEqual(cgi.logfp.getvalue(), "Testing initlog 1\nTesting log 2\n")
if os.path.exists("/dev/null"):
if os.path.exists(os.devnull):
cgi.logfp = None
cgi.logfile = "/dev/null"
cgi.logfile = os.devnull
cgi.initlog("%s", "Testing log 3")
self.addCleanup(cgi.closelog)
cgi.log("Testing log 4")

View File

@ -217,6 +217,15 @@ class DumbDBMTestCase(unittest.TestCase):
self.assertEqual(str(cm.exception),
"DBM object has already been closed")
def test_eval(self):
with open(_fname + '.dir', 'w') as stream:
stream.write("str(print('Hacked!')), 0\n")
with support.captured_stdout() as stdout:
with self.assertRaises(ValueError):
with dumbdbm.open(_fname) as f:
pass
self.assertEqual(stdout.getvalue(), '')
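Background for this test (Issue #22885, noted in Misc/NEWS below): dbm.dumb previously rebuilt its index by eval()-ing each line of the .dir file, so a crafted entry could execute arbitrary code; the fix parses entries as literals instead (via ast.literal_eval()). A sketch of the distinction:

    import ast

    ast.literal_eval("('key', (0, 10))")            # fine: a plain literal
    ast.literal_eval("str(print('Hacked!')), 0")    # raises ValueError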
def tearDown(self):
_delete_files()

View File

@ -155,9 +155,9 @@ class TestPartialC(TestPartial, unittest.TestCase):
def test_repr(self):
args = (object(), object())
args_repr = ', '.join(repr(a) for a in args)
#kwargs = {'a': object(), 'b': object()}
kwargs = {'a': object()}
kwargs_repr = ', '.join("%s=%r" % (k, v) for k, v in kwargs.items())
kwargs = {'a': object(), 'b': object()}
kwargs_reprs = ['a={a!r}, b={b!r}'.format_map(kwargs),
'b={b!r}, a={a!r}'.format_map(kwargs)]
if self.partial is c_functools.partial:
name = 'functools.partial'
else:
@ -172,12 +172,14 @@ class TestPartialC(TestPartial, unittest.TestCase):
repr(f))
f = self.partial(capture, **kwargs)
self.assertEqual('{}({!r}, {})'.format(name, capture, kwargs_repr),
repr(f))
self.assertIn(repr(f),
['{}({!r}, {})'.format(name, capture, kwargs_repr)
for kwargs_repr in kwargs_reprs])
f = self.partial(capture, *args, **kwargs)
self.assertEqual('{}({!r}, {}, {})'.format(name, capture, args_repr, kwargs_repr),
repr(f))
self.assertIn(repr(f),
['{}({!r}, {}, {})'.format(name, capture, args_repr, kwargs_repr)
for kwargs_repr in kwargs_reprs])
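The change accounts for dict iteration order being arbitrary: with two keyword arguments, the repr can legitimately list them in either order, so the test must accept both. For example (illustrative):

    import functools

    f = functools.partial(sorted, key=len, reverse=True)
    # Either ordering of the keyword arguments is a valid repr, e.g.
    # functools.partial(<built-in function sorted>, key=<built-in function len>, reverse=True)
    print(repr(f))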
def test_pickle(self):
f = self.partial(signature, 'asdf', bar=True)

View File

@ -190,6 +190,8 @@ class DebuggerTests(unittest.TestCase):
'linux-vdso.so',
'warning: Could not load shared library symbols for '
'linux-gate.so',
'warning: Could not load shared library symbols for '
'linux-vdso64.so',
'Do you need "set solib-search-path" or '
'"set sysroot"?',
'warning: Source file is more recent than executable.',

View File

@ -708,7 +708,22 @@ class BasicTest(TestCase):
self.assertTrue(response.closed)
self.assertTrue(conn.sock.file_closed)
class OfflineTest(TestCase):
def test_all(self):
# Documented objects defined in the module should be in __all__
expected = {"responses"} # White-list documented dict() object
# HTTPMessage, parse_headers(), and the HTTP status code constants are
# intentionally omitted for simplicity
blacklist = {"HTTPMessage", "parse_headers"}
for name in dir(client):
if name in blacklist:
continue
module_object = getattr(client, name)
if getattr(module_object, "__module__", None) == "http.client":
expected.add(name)
self.assertCountEqual(client.__all__, expected)
def test_responses(self):
self.assertEqual(client.responses[client.NOT_FOUND], "Not Found")

View File

@ -616,6 +616,11 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
self.verify_expected_headers(result[1:-1])
self.verify_get_called()
self.assertEqual(result[-1], b'<html><body>Data</body></html>\r\n')
self.assertEqual(self.handler.requestline, 'GET / HTTP/1.1')
self.assertEqual(self.handler.command, 'GET')
self.assertEqual(self.handler.path, '/')
self.assertEqual(self.handler.request_version, 'HTTP/1.1')
self.assertSequenceEqual(self.handler.headers.items(), ())
def test_http_1_0(self):
result = self.send_typical_request(b'GET / HTTP/1.0\r\n\r\n')
@ -623,6 +628,11 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
self.verify_expected_headers(result[1:-1])
self.verify_get_called()
self.assertEqual(result[-1], b'<html><body>Data</body></html>\r\n')
self.assertEqual(self.handler.requestline, 'GET / HTTP/1.0')
self.assertEqual(self.handler.command, 'GET')
self.assertEqual(self.handler.path, '/')
self.assertEqual(self.handler.request_version, 'HTTP/1.0')
self.assertSequenceEqual(self.handler.headers.items(), ())
def test_http_0_9(self):
result = self.send_typical_request(b'GET / HTTP/0.9\r\n\r\n')
@ -636,6 +646,12 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
self.verify_expected_headers(result[1:-1])
self.verify_get_called()
self.assertEqual(result[-1], b'<html><body>Data</body></html>\r\n')
self.assertEqual(self.handler.requestline, 'GET / HTTP/1.0')
self.assertEqual(self.handler.command, 'GET')
self.assertEqual(self.handler.path, '/')
self.assertEqual(self.handler.request_version, 'HTTP/1.0')
headers = (("Expect", "100-continue"),)
self.assertSequenceEqual(self.handler.headers.items(), headers)
def test_with_continue_1_1(self):
result = self.send_typical_request(b'GET / HTTP/1.1\r\nExpect: 100-continue\r\n\r\n')
@ -645,6 +661,12 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
self.verify_expected_headers(result[2:-1])
self.verify_get_called()
self.assertEqual(result[-1], b'<html><body>Data</body></html>\r\n')
self.assertEqual(self.handler.requestline, 'GET / HTTP/1.1')
self.assertEqual(self.handler.command, 'GET')
self.assertEqual(self.handler.path, '/')
self.assertEqual(self.handler.request_version, 'HTTP/1.1')
headers = (("Expect", "100-continue"),)
self.assertSequenceEqual(self.handler.headers.items(), headers)
def test_header_buffering_of_send_error(self):
@ -730,6 +752,7 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
result = self.send_typical_request(b'GET ' + b'x' * 65537)
self.assertEqual(result[0], b'HTTP/1.1 414 Request-URI Too Long\r\n')
self.assertFalse(self.handler.get_called)
self.assertIsInstance(self.handler.requestline, str)
def test_header_length(self):
# Issue #6791: same for headers
@ -737,6 +760,22 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
b'GET / HTTP/1.1\r\nX-Foo: bar' + b'r' * 65537 + b'\r\n\r\n')
self.assertEqual(result[0], b'HTTP/1.1 400 Line too long\r\n')
self.assertFalse(self.handler.get_called)
self.assertEqual(self.handler.requestline, 'GET / HTTP/1.1')
def test_close_connection(self):
# handle_one_request() should be repeatedly called until
# it sets close_connection
def handle_one_request():
self.handler.close_connection = next(close_values)
self.handler.handle_one_request = handle_one_request
close_values = iter((True,))
self.handler.handle()
self.assertRaises(StopIteration, next, close_values)
close_values = iter((False, False, True))
self.handler.handle()
self.assertRaises(StopIteration, next, close_values)
class SimpleHTTPRequestHandlerTestCase(unittest.TestCase):
""" Test url parsing """
@ -760,6 +799,19 @@ class SimpleHTTPRequestHandlerTestCase(unittest.TestCase):
self.assertEqual(path, self.translated)
class MiscTestCase(unittest.TestCase):
def test_all(self):
expected = []
blacklist = {'executable', 'nobody_uid', 'test'}
for name in dir(server):
if name.startswith('_') or name in blacklist:
continue
module_object = getattr(server, name)
if getattr(module_object, '__module__', None) == 'http.server':
expected.append(name)
self.assertCountEqual(server.__all__, expected)
def test_main(verbose=None):
cwd = os.getcwd()
try:
@ -769,6 +821,7 @@ def test_main(verbose=None):
SimpleHTTPServerTestCase,
CGIHTTPServerTestCase,
SimpleHTTPRequestHandlerTestCase,
MiscTestCase,
)
finally:
os.chdir(cwd)

View File

@ -593,13 +593,44 @@ class IOTest(unittest.TestCase):
with self.open(zero, "r") as f:
self.assertRaises(OverflowError, f.read)
def test_flush_error_on_close(self):
f = self.open(support.TESTFN, "wb", buffering=0)
def check_flush_error_on_close(self, *args, **kwargs):
# Test that the file is closed despite failed flush
# and that flush() is called before file closed.
f = self.open(*args, **kwargs)
closed = []
def bad_flush():
closed[:] = [f.closed]
raise OSError()
f.flush = bad_flush
self.assertRaises(OSError, f.close) # exception not swallowed
self.assertTrue(f.closed)
self.assertTrue(closed) # flush() called
self.assertFalse(closed[0]) # flush() called before file closed
f.flush = lambda: None # break reference loop
def test_flush_error_on_close(self):
# raw file
# Issue #5700: io.FileIO calls flush() after file closed
self.check_flush_error_on_close(support.TESTFN, 'wb', buffering=0)
fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'wb', buffering=0)
fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'wb', buffering=0, closefd=False)
os.close(fd)
# buffered io
self.check_flush_error_on_close(support.TESTFN, 'wb')
fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'wb')
fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'wb', closefd=False)
os.close(fd)
# text io
self.check_flush_error_on_close(support.TESTFN, 'w')
fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'w')
fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'w', closefd=False)
os.close(fd)
def test_multi_close(self):
f = self.open(support.TESTFN, "wb", buffering=0)
@ -788,13 +819,22 @@ class CommonBufferedTests:
self.assertEqual(repr(b), "<%s name=b'dummy'>" % clsname)
def test_flush_error_on_close(self):
# Test that buffered file is closed despite failed flush
# and that flush() is called before file closed.
raw = self.MockRawIO()
closed = []
def bad_flush():
closed[:] = [b.closed, raw.closed]
raise OSError()
raw.flush = bad_flush
b = self.tp(raw)
self.assertRaises(OSError, b.close) # exception not swallowed
self.assertTrue(b.closed)
self.assertTrue(raw.closed)
self.assertTrue(closed) # flush() called
self.assertFalse(closed[0]) # flush() called before file closed
self.assertFalse(closed[1])
raw.flush = lambda: None # break reference loop
def test_close_error_on_close(self):
raw = self.MockRawIO()
@ -2618,12 +2658,21 @@ class TextIOWrapperTest(unittest.TestCase):
self.assertEqual(content.count("Thread%03d\n" % n), 1)
def test_flush_error_on_close(self):
# Test that text file is closed despite failed flush
# and that flush() is called before file closed.
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
closed = []
def bad_flush():
closed[:] = [txt.closed, txt.buffer.closed]
raise OSError()
txt.flush = bad_flush
self.assertRaises(OSError, txt.close) # exception not swallowed
self.assertTrue(txt.closed)
self.assertTrue(txt.buffer.closed)
self.assertTrue(closed) # flush() called
self.assertFalse(closed[0]) # flush() called before file closed
self.assertFalse(closed[1])
txt.flush = lambda: None # break reference loop
def test_close_error_on_close(self):
buffer = self.BytesIO(self.testdata)

View File

@ -511,7 +511,7 @@ class TestMiscellaneous(unittest.TestCase):
self.skipTest('test needs Turkish locale')
loc = locale.getlocale(locale.LC_CTYPE)
if verbose:
print('got locale %a' % (loc,))
print('testing with %a' % (loc,), end=' ', flush=True)
locale.setlocale(locale.LC_CTYPE, loc)
self.assertEqual(loc, locale.getlocale(locale.LC_CTYPE))

View File

@ -44,6 +44,13 @@ class Test_MultibyteCodec(unittest.TestCase):
self.assertRaises(IndexError, dec,
b'apple\x92ham\x93spam', 'test.cjktest')
def test_errorcallback_custom_ignore(self):
# Issue #23215: MemoryError with custom error handlers and multibyte codecs
data = 100 * "\udc00"
codecs.register_error("test.ignore", codecs.ignore_errors)
for enc in ALL_CJKENCODINGS:
self.assertEqual(data.encode(enc, "test.ignore"), b'')
def test_codingspec(self):
try:
for enc in ALL_CJKENCODINGS:

View File

@ -105,31 +105,35 @@ class NTFlavourTest(_BaseFlavourTest, unittest.TestCase):
check = self._check_parse_parts
# First part is anchored
check(['c:'], ('c:', '', ['c:']))
check(['c:\\'], ('c:', '\\', ['c:\\']))
check(['\\'], ('', '\\', ['\\']))
check(['c:/'], ('c:', '\\', ['c:\\']))
check(['/'], ('', '\\', ['\\']))
check(['c:a'], ('c:', '', ['c:', 'a']))
check(['c:\\a'], ('c:', '\\', ['c:\\', 'a']))
check(['\\a'], ('', '\\', ['\\', 'a']))
check(['c:/a'], ('c:', '\\', ['c:\\', 'a']))
check(['/a'], ('', '\\', ['\\', 'a']))
# UNC paths
check(['\\\\a\\b'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
check(['\\\\a\\b\\'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
check(['\\\\a\\b\\c'], ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c']))
check(['//a/b'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
check(['//a/b/'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
check(['//a/b/c'], ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c']))
# Second part is anchored, so that the first part is ignored
check(['a', 'Z:b', 'c'], ('Z:', '', ['Z:', 'b', 'c']))
check(['a', 'Z:\\b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c']))
check(['a', '\\b', 'c'], ('', '\\', ['\\', 'b', 'c']))
check(['a', 'Z:/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c']))
# UNC paths
check(['a', '\\\\b\\c', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
check(['a', '//b/c', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
# Collapsing and stripping excess slashes
check(['a', 'Z:\\\\b\\\\c\\', 'd\\'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd']))
check(['a', 'Z://b//c/', 'd/'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd']))
# UNC paths
check(['a', '\\\\b\\c\\\\', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
check(['a', '//b/c//', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
# Extended paths
check(['\\\\?\\c:\\'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\']))
check(['\\\\?\\c:\\a'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a']))
check(['//?/c:/'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\']))
check(['//?/c:/a'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a']))
check(['//?/c:/a', '/b'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'b']))
# Extended UNC paths (format is "\\?\UNC\server\share")
check(['\\\\?\\UNC\\b\\c'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\']))
check(['\\\\?\\UNC\\b\\c\\d'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd']))
check(['//?/UNC/b/c'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\']))
check(['//?/UNC/b/c/d'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd']))
# Second part has a root but not drive
check(['a', '/b', 'c'], ('', '\\', ['\\', 'b', 'c']))
check(['Z:/a', '/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c']))
check(['//?/Z:/a', '/b', 'c'], ('\\\\?\\Z:', '\\', ['\\\\?\\Z:\\', 'b', 'c']))
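At the user level, this fix (Issue #23146, see Misc/NEWS below) means forward-slash spellings of absolute Windows paths parse the same as their backslash forms. A quick illustration with the fix applied, using PureWindowsPath so it runs on any platform:

    from pathlib import PureWindowsPath

    p = PureWindowsPath('//?/c:/a')
    # drive, root and parts match the backslash spelling '\\\\?\\c:\\a',
    # as asserted by the checks above.
    print(p.drive, p.root, p.parts)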
def test_splitroot(self):
f = self.flavour.splitroot

View File

@ -3,12 +3,15 @@ import sys
import builtins
import contextlib
import difflib
import importlib.util
import inspect
import pydoc
import py_compile
import keyword
import _pickle
import pkgutil
import re
import stat
import string
import test.support
import time
@ -32,6 +35,10 @@ try:
except ImportError:
threading = None
class nonascii:
'Це не латиниця'
pass
if test.support.HAVE_DOCSTRINGS:
expected_data_docstrings = (
'dictionary for instance variables (if defined)',
@ -471,6 +478,11 @@ class PydocDocTest(unittest.TestCase):
self.assertEqual(expected, result,
"documentation for missing module found")
def test_not_ascii(self):
result = run_pydoc('test.test_pydoc.nonascii', PYTHONIOENCODING='ascii')
encoded = nonascii.__doc__.encode('ascii', 'backslashreplace')
self.assertIn(encoded, result)
def test_input_strip(self):
missing_module = " test.i_am_not_here "
result = str(run_pydoc(missing_module), 'ascii')
@ -557,6 +569,18 @@ class PydocDocTest(unittest.TestCase):
self.assertEqual(synopsis, expected)
def test_synopsis_sourceless_empty_doc(self):
with test.support.temp_cwd() as test_dir:
init_path = os.path.join(test_dir, 'foomod42.py')
cached_path = importlib.util.cache_from_source(init_path)
with open(init_path, 'w') as fobj:
fobj.write("foo = 1")
py_compile.compile(init_path)
synopsis = pydoc.synopsis(init_path, {})
self.assertIsNone(synopsis)
synopsis_cached = pydoc.synopsis(cached_path, {})
self.assertIsNone(synopsis_cached)
def test_splitdoc_with_description(self):
example_string = "I Am A Doc\n\n\nHere is my description"
self.assertEqual(pydoc.splitdoc(example_string),
@ -612,6 +636,7 @@ class PydocImportTest(PydocBaseTest):
def setUp(self):
self.test_dir = os.mkdir(TESTFN)
self.addCleanup(rmtree, TESTFN)
importlib.invalidate_caches()
def test_badimport(self):
# This tests the fix for issue 5230, where if pydoc found the module
@ -670,6 +695,22 @@ class PydocImportTest(PydocBaseTest):
self.assertEqual(out.getvalue(), '')
self.assertEqual(err.getvalue(), '')
def test_apropos_empty_doc(self):
pkgdir = os.path.join(TESTFN, 'walkpkg')
os.mkdir(pkgdir)
self.addCleanup(rmtree, pkgdir)
init_path = os.path.join(pkgdir, '__init__.py')
with open(init_path, 'w') as fobj:
fobj.write("foo = 1")
current_mode = stat.S_IMODE(os.stat(pkgdir).st_mode)
try:
os.chmod(pkgdir, current_mode & ~stat.S_IEXEC)
with self.restrict_walk_packages(path=[TESTFN]), captured_stdout() as stdout:
pydoc.apropos('')
self.assertIn('walkpkg', stdout.getvalue())
finally:
os.chmod(pkgdir, current_mode)
@unittest.skip('causes undesirable side-effects (#20128)')
def test_modules(self):
# See Helper.listmodules().

View File

@ -557,7 +557,7 @@ class ReTests(unittest.TestCase):
self.assertEqual(re.match("a.*b", "a\n\nb", re.DOTALL).group(0),
"a\n\nb")
def test_non_consuming(self):
def test_lookahead(self):
self.assertEqual(re.match("(a(?=\s[^a]))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[^a]*))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[abc]))", "a b").group(1), "a")
@ -571,6 +571,37 @@ class ReTests(unittest.TestCase):
self.assertEqual(re.match(r"(a)(?!\s\1)", "a b").group(1), "a")
self.assertEqual(re.match(r"(a)(?!\s(abc|a))", "a b").group(1), "a")
# Group reference.
self.assertTrue(re.match(r'(a)b(?=\1)a', 'aba'))
self.assertIsNone(re.match(r'(a)b(?=\1)c', 'abac'))
# Named group reference.
self.assertTrue(re.match(r'(?P<g>a)b(?=(?P=g))a', 'aba'))
self.assertIsNone(re.match(r'(?P<g>a)b(?=(?P=g))c', 'abac'))
# Conditional group reference.
self.assertTrue(re.match(r'(?:(a)|(x))b(?=(?(2)x|c))c', 'abc'))
self.assertIsNone(re.match(r'(?:(a)|(x))b(?=(?(2)c|x))c', 'abc'))
self.assertTrue(re.match(r'(?:(a)|(x))b(?=(?(2)x|c))c', 'abc'))
self.assertIsNone(re.match(r'(?:(a)|(x))b(?=(?(1)b|x))c', 'abc'))
self.assertTrue(re.match(r'(?:(a)|(x))b(?=(?(1)c|x))c', 'abc'))
# Group used before defined.
self.assertTrue(re.match(r'(a)b(?=(?(2)x|c))(c)', 'abc'))
self.assertIsNone(re.match(r'(a)b(?=(?(2)b|x))(c)', 'abc'))
self.assertTrue(re.match(r'(a)b(?=(?(1)c|x))(c)', 'abc'))
def test_lookbehind(self):
self.assertTrue(re.match(r'ab(?<=b)c', 'abc'))
self.assertIsNone(re.match(r'ab(?<=c)c', 'abc'))
self.assertIsNone(re.match(r'ab(?<!b)c', 'abc'))
self.assertTrue(re.match(r'ab(?<!c)c', 'abc'))
# Group reference.
self.assertWarns(RuntimeWarning, re.compile, r'(a)a(?<=\1)c')
# Named group reference.
self.assertWarns(RuntimeWarning, re.compile, r'(?P<g>a)a(?<=(?P=g))c')
# Conditional group reference.
self.assertWarns(RuntimeWarning, re.compile, r'(a)b(?<=(?(1)b|x))c')
# Group used before defined.
self.assertWarns(RuntimeWarning, re.compile, r'(a)b(?<=(?(2)b|x))(c)')
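A quick way to observe the new warning interactively (a sketch; the category is RuntimeWarning, as asserted above):

    import re
    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        re.compile(r'(a)a(?<=\1)c')  # group reference inside a lookbehind
    print(caught[0].category)        # <class 'RuntimeWarning'>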
def test_ignore_case(self):
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match(b"abc", b"ABC", re.I).group(0), b"ABC")

View File

@ -9,7 +9,6 @@ import doctest
import math
import random
import sys
import types
import unittest
from decimal import Decimal

View File

@ -1922,7 +1922,7 @@ class POSIXProcessTestCase(BaseTestCase):
open_fds = set(fds)
# add a bunch more fds
for _ in range(9):
fd = os.open("/dev/null", os.O_RDONLY)
fd = os.open(os.devnull, os.O_RDONLY)
self.addCleanup(os.close, fd)
open_fds.add(fd)
@ -1984,7 +1984,7 @@ class POSIXProcessTestCase(BaseTestCase):
open_fds = set()
# Add a bunch more fds to pass down.
for _ in range(40):
fd = os.open("/dev/null", os.O_RDONLY)
fd = os.open(os.devnull, os.O_RDONLY)
open_fds.add(fd)
# Leave a two pairs of low ones available for use by the

View File

@ -1994,6 +1994,21 @@ class CommandLineTest(unittest.TestCase):
finally:
support.unlink(tar_name)
def test_create_command_compressed(self):
files = [support.findfile('tokenize_tests.txt'),
support.findfile('tokenize_tests-no-coding-cookie-'
'and-utf8-bom-sig-only.txt')]
for filetype in (GzipTest, Bz2Test, LzmaTest):
if not filetype.open:
continue
try:
tar_name = tmpname + '.' + filetype.suffix
out = self.tarfilecmd('-c', tar_name, *files)
with filetype.taropen(tar_name) as tar:
tar.getmembers()
finally:
support.unlink(tar_name)
def test_extract_command(self):
self.make_simple_tarfile(tmpname)
for opt in '-e', '--extract':

View File

@ -648,7 +648,14 @@ class PyZipFileTests(unittest.TestCase):
if name + 'o' not in namelist:
self.assertIn(name + 'c', namelist)
def requiresWriteAccess(self, path):
# effective_ids unavailable on windows
if not os.access(path, os.W_OK,
effective_ids=os.access in os.supports_effective_ids):
self.skipTest('requires write access to the installed location')
def test_write_pyfile(self):
self.requiresWriteAccess(os.path.dirname(__file__))
with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
fn = __file__
if fn.endswith('.pyc') or fn.endswith('.pyo'):
@ -680,6 +687,7 @@ class PyZipFileTests(unittest.TestCase):
def test_write_python_package(self):
import email
packagedir = os.path.dirname(email.__file__)
self.requiresWriteAccess(packagedir)
with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
zipfp.writepy(packagedir)
@ -693,6 +701,7 @@ class PyZipFileTests(unittest.TestCase):
def test_write_filtered_python_package(self):
import test
packagedir = os.path.dirname(test.__file__)
self.requiresWriteAccess(packagedir)
with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
@ -721,6 +730,7 @@ class PyZipFileTests(unittest.TestCase):
def test_write_with_optimization(self):
import email
packagedir = os.path.dirname(email.__file__)
self.requiresWriteAccess(packagedir)
# use .pyc if running test in optimization mode,
# use .pyo if running test in debug mode
optlevel = 1 if __debug__ else 0

View File

@ -1288,7 +1288,7 @@ class TurtleScreen(TurtleScreenBase):
def _incrementudc(self):
"""Increment update counter."""
if not TurtleScreen._RUNNING:
TurtleScreen._RUNNNING = True
TurtleScreen._RUNNING = True
raise Terminator
if self._tracing > 0:
self._updatecounter += 1
@ -3754,7 +3754,7 @@ class _Screen(TurtleScreen):
Turtle._screen = None
_Screen._root = None
_Screen._canvas = None
TurtleScreen._RUNNING = True
TurtleScreen._RUNNING = False
root.destroy()
def bye(self):
@ -3795,7 +3795,6 @@ class _Screen(TurtleScreen):
except AttributeError:
exit(0)
class Turtle(RawTurtle):
"""RawTurtle auto-creating (scrolled) canvas.
@ -3818,18 +3817,6 @@ class Turtle(RawTurtle):
Pen = Turtle
def _getpen():
"""Create the 'anonymous' turtle if not already present."""
if Turtle._pen is None:
Turtle._pen = Turtle()
return Turtle._pen
def _getscreen():
"""Create a TurtleScreen if not already present."""
if Turtle._screen is None:
Turtle._screen = Screen()
return Turtle._screen
def write_docstringdict(filename="turtle_docstringdict"):
"""Create and write docstring-dictionary to file.
@ -3952,26 +3939,38 @@ def _screen_docrevise(docstr):
## as functions. So we can enhance, change, add, delete methods to these
## classes and do not need to change anything here.
__func_body = """\
def {name}{paramslist}:
if {obj} is None:
if not TurtleScreen._RUNNING:
TurtleScreen._RUNNING = True
raise Terminator
{obj} = {init}
try:
return {obj}.{name}{argslist}
except TK.TclError:
if not TurtleScreen._RUNNING:
TurtleScreen._RUNNING = True
raise Terminator
raise
"""
for methodname in _tg_screen_functions:
pl1, pl2 = getmethparlist(eval('_Screen.' + methodname))
if pl1 == "":
print(">>>>>>", pl1, pl2)
continue
defstr = ("def %(key)s%(pl1)s: return _getscreen().%(key)s%(pl2)s" %
{'key':methodname, 'pl1':pl1, 'pl2':pl2})
exec(defstr)
eval(methodname).__doc__ = _screen_docrevise(eval('_Screen.'+methodname).__doc__)
def _make_global_funcs(functions, cls, obj, init, docrevise):
for methodname in functions:
method = getattr(cls, methodname)
pl1, pl2 = getmethparlist(method)
if pl1 == "":
print(">>>>>>", pl1, pl2)
continue
defstr = __func_body.format(obj=obj, init=init, name=methodname,
paramslist=pl1, argslist=pl2)
exec(defstr, globals())
globals()[methodname].__doc__ = docrevise(method.__doc__)
for methodname in _tg_turtle_functions:
pl1, pl2 = getmethparlist(eval('Turtle.' + methodname))
if pl1 == "":
print(">>>>>>", pl1, pl2)
continue
defstr = ("def %(key)s%(pl1)s: return _getpen().%(key)s%(pl2)s" %
{'key':methodname, 'pl1':pl1, 'pl2':pl2})
exec(defstr)
eval(methodname).__doc__ = _turtle_docrevise(eval('Turtle.'+methodname).__doc__)
_make_global_funcs(_tg_screen_functions, _Screen,
'Turtle._screen', 'Screen()', _screen_docrevise)
_make_global_funcs(_tg_turtle_functions, Turtle,
'Turtle._pen', 'Turtle()', _turtle_docrevise)
done = mainloop
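To make the template concrete: for a turtle method such as forward(), _make_global_funcs generates roughly the following module-level wrapper. This is a hypothetical expansion of __func_body (it assumes turtle's own module globals: Turtle, TurtleScreen, TK, Terminator), not literal source:

    def forward(distance):
        if Turtle._pen is None:
            if not TurtleScreen._RUNNING:
                TurtleScreen._RUNNING = True
                raise Terminator
            Turtle._pen = Turtle()
        try:
            return Turtle._pen.forward(distance)
        except TK.TclError:
            if not TurtleScreen._RUNNING:
                TurtleScreen._RUNNING = True
                raise Terminator
            raise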

View File

@ -344,6 +344,8 @@ class DemoWindow(object):
else:
self.state = DONE
except turtle.Terminator:
if self.root is None:
return
self.state = DONE
result = "stopped!"
if self.state == DONE:
@ -369,7 +371,9 @@ class DemoWindow(object):
turtle.TurtleScreen._RUNNING = False
def _destroy(self):
turtle.TurtleScreen._RUNNING = False
self.root.destroy()
self.root = None
def main():

View File

@ -506,6 +506,7 @@ Eric Groo
Dag Gruneau
Filip Gruszczyński
Thomas Guettler
Yuyang Guo
Anuj Gupta
Michael Guravage
Lars Gustäbel

View File

@ -13,6 +13,44 @@ Core and Builtins
Library
-------
- Issue #6639: Module-level turtle functions no longer raise TclError after
closing the window.
- Issues #814253, #9179: Warnings are now raised when group references and
conditional group references are used in lookbehind assertions in regular
expressions.
- Issue #23215: Multibyte codecs with custom error handlers that ignore errors
consumed too much memory and raised SystemError or MemoryError.
Original patch by Aleksi Torhamo.
- Issue #5700: io.FileIO() called flush() after closing the file.
flush() was not called in close() if closefd=False.
- Issue #23374: Fixed pydoc failure with non-ASCII files when stdout encoding
differs from file system encoding (e.g. on Mac OS).
- Issue #23481: Remove RC4 from the SSL module's default cipher list.
- Issue #21548: Fix pydoc.synopsis() and pydoc.apropos() on modules with empty
docstrings.
- Issue #22885: Fixed arbitrary code execution vulnerability in the dbm.dumb
module. Original patch by Claudiu Popa.
- Issue #23146: Fix mishandling of absolute Windows paths with forward
slashes in pathlib.
- Issue #23421: Fixed compression in tarfile CLI. Patch by wdv4758h.
- Issue #23361: Fix possible overflow in Windows subprocess creation code.
Build
-----
- Issue #23445: pydebug builds now use "gcc -Og" where possible, to make
the resulting executable faster.
What's New in Python 3.4.3?
===========================

View File

@ -21,7 +21,6 @@ README The file you're reading now
README.AIX Information about using Python on AIX
README.coverity Information about running Coverity's Prevent on Python
README.valgrind Information for Valgrind users, see valgrind-python.supp
RPM (Old) tools to build RPMs
SpecialBuilds.txt Describes extra symbols you can set for debug builds
svnmap.txt Map of old SVN revs and branches to hg changeset ids
valgrind-python.supp Valgrind suppression file, see README.valgrind

View File

@ -1,33 +0,0 @@
This directory contains support file used to build RPM releases of
Python. Its contents are maintained by Sean Reifschneider
<jafo@tummy.com>.
If you wish to build RPMs from the base Python release tar-file, note
that you will have to download the
"doc/<version>/html-<version>.tar.bz2"
file from python.org and place it into your "SOURCES" directory for
the build to complete. This is the same directory that you place the
Python-2.3.1 release tar-file in. You can then use the ".spec" file in
this directory to build RPMs.
You may also wish to pursue RPMs provided by distribution makers to see if
they have one suitable for your uses. If, for example, you just want a
slightly newer version of Python than what the distro provides, you could
pick up the closest SRPM your distro provides, and then modify it to
the newer version, and build that. It may be as simple as just changing
the "version" information in the spec file (or it may require fixing
patches).
NOTE: I am *NOT* recommending just using the binary RPM; never do an
install with "--force" or "--nodeps".
Also worth pursuing may be newer versions provided by similar distros. For
example, a Python 3 SRPM from Fedora may be a good baseline to try building
on CentOS.
Many newer SRPMs won't install on older distros because of format changes.
You can manually extract these SRPMS with:
mkdir foo
cd foo
rpm2cpio <../python3-*.src.rpm | cpio -ivd

View File

@ -1,389 +0,0 @@
##########################
# User-modifiable configs
##########################
# Is the resulting package and the installed binary named "python" or
# "python2"?
#WARNING: Commenting out doesn't work. Last line is what's used.
%define config_binsuffix none
%define config_binsuffix 2.6
# Build tkinter? "auto" enables it if /usr/bin/wish exists.
#WARNING: Commenting out doesn't work. Last line is what's used.
%define config_tkinter no
%define config_tkinter yes
%define config_tkinter auto
# Use pymalloc? The last line (commented or not) determines whether
# pymalloc is used.
#WARNING: Commenting out doesn't work. Last line is what's used.
%define config_pymalloc no
%define config_pymalloc yes
# Enable IPV6?
#WARNING: Commenting out doesn't work. Last line is what's used.
%define config_ipv6 yes
%define config_ipv6 no
# Build shared libraries or .a library?
#WARNING: Commenting out doesn't work. Last line is what's used.
%define config_sharedlib no
%define config_sharedlib yes
# Location of the HTML directory.
%define config_htmldir /var/www/html/python
#################################
# End of user-modifiable configs
#################################
%define name python
#--start constants--
%define version 3.4.3
%define libvers 3.4
#--end constants--
%define release 1pydotorg
%define __prefix /usr
# kludge to get around rpm <percent>define weirdness
%define ipv6 %(if [ "%{config_ipv6}" = yes ]; then echo --enable-ipv6; else echo --disable-ipv6; fi)
%define pymalloc %(if [ "%{config_pymalloc}" = yes ]; then echo --with-pymalloc; else echo --without-pymalloc; fi)
%define binsuffix %(if [ "%{config_binsuffix}" = none ]; then echo ; else echo "%{config_binsuffix}"; fi)
%define include_tkinter %(if [ \\( "%{config_tkinter}" = auto -a -f /usr/bin/wish \\) -o "%{config_tkinter}" = yes ]; then echo 1; else echo 0; fi)
%define libdirname %(( uname -m | egrep -q '_64$' && [ -d /usr/lib64 ] && echo lib64 ) || echo lib)
%define sharedlib %(if [ "%{config_sharedlib}" = yes ]; then echo --enable-shared; else echo ; fi)
%define include_sharedlib %(if [ "%{config_sharedlib}" = yes ]; then echo 1; else echo 0; fi)
# detect if documentation is available
%define include_docs %(if [ -f "%{_sourcedir}/html-%{version}.tar.bz2" ]; then echo 1; else echo 0; fi)
Summary: An interpreted, interactive, object-oriented programming language.
Name: %{name}%{binsuffix}
Version: %{version}
Release: %{release}
License: PSF
Group: Development/Languages
Source: Python-%{version}.tar.bz2
%if %{include_docs}
Source1: html-%{version}.tar.bz2
%endif
BuildRoot: %{_tmppath}/%{name}-%{version}-root
BuildPrereq: expat-devel
BuildPrereq: db4-devel
BuildPrereq: gdbm-devel
BuildPrereq: sqlite-devel
Prefix: %{__prefix}
Packager: Sean Reifschneider <jafo-rpms@tummy.com>
%description
Python is an interpreted, interactive, object-oriented programming
language. It incorporates modules, exceptions, dynamic typing, very high
level dynamic data types, and classes. Python combines remarkable power
with very clear syntax. It has interfaces to many system calls and
libraries, as well as to various window systems, and is extensible in C or
C++. It is also usable as an extension language for applications that need
a programmable interface. Finally, Python is portable: it runs on many
brands of UNIX, on PCs under Windows, MS-DOS, and on the Mac.
%package devel
Summary: The libraries and header files needed for Python extension development.
Prereq: python%{binsuffix} = %{PACKAGE_VERSION}
Group: Development/Libraries
%description devel
The Python programming language's interpreter can be extended with
dynamically loaded extensions and can be embedded in other programs.
This package contains the header files and libraries needed to do
these types of tasks.
Install python-devel if you want to develop Python extensions. The
python package will also need to be installed. You'll probably also
want to install the python-docs package, which contains Python
documentation.
%if %{include_tkinter}
%package tkinter
Summary: A graphical user interface for the Python scripting language.
Group: Development/Languages
Prereq: python%{binsuffix} = %{PACKAGE_VERSION}-%{release}
%description tkinter
The Tkinter (Tk interface) program is a graphical user interface for
the Python scripting language.
You should install the tkinter package if you'd like to use a graphical
user interface for Python programming.
%endif
%package tools
Summary: A collection of development tools included with Python.
Group: Development/Tools
Prereq: python%{binsuffix} = %{PACKAGE_VERSION}-%{release}
%description tools
The Python package includes several development tools that are used
to build python programs. This package contains a selection of those
tools, including the IDLE Python IDE.
Install python-tools if you want to use these tools to develop
Python programs. You will also need to install the python and
tkinter packages.
%if %{include_docs}
%package docs
Summary: Python-related documentation.
Group: Development/Documentation
%description docs
Documentation relating to the Python programming language in HTML and info
formats.
%endif
%changelog
* Mon Dec 20 2004 Sean Reifschneider <jafo-rpms@tummy.com> [2.4-2pydotorg]
- Changing the idle wrapper so that it passes arguments to idle.
* Tue Oct 19 2004 Sean Reifschneider <jafo-rpms@tummy.com> [2.4b1-1pydotorg]
- Updating to 2.4.
* Thu Jul 22 2004 Sean Reifschneider <jafo-rpms@tummy.com> [2.3.4-3pydotorg]
- Paul Tiemann fixes for %{prefix}.
- Adding permission changes for directory as suggested by reimeika.ca
- Adding code to detect when it should be using lib64.
- Adding a define for the location of /var/www/html for docs.
* Thu May 27 2004 Sean Reifschneider <jafo-rpms@tummy.com> [2.3.4-2pydotorg]
- Including changes from Ian Holsman to build under Red Hat 7.3.
- Fixing some problems with the /usr/local path change.
* Sat Mar 27 2004 Sean Reifschneider <jafo-rpms@tummy.com> [2.3.2-3pydotorg]
- Being more aggressive about finding the paths to fix for
#!/usr/local/bin/python.
* Sat Feb 07 2004 Sean Reifschneider <jafo-rpms@tummy.com> [2.3.3-2pydotorg]
- Adding code to remove "#!/usr/local/bin/python" from particular files and
causing the RPM build to terminate if there are any unexpected files
which have that line in them.
* Mon Oct 13 2003 Sean Reifschneider <jafo-rpms@tummy.com> [2.3.2-1pydotorg]
- Adding code to detect whether documentation is available to build.
* Fri Sep 19 2003 Sean Reifschneider <jafo-rpms@tummy.com> [2.3.1-1pydotorg]
- Updating to the 2.3.1 release.
* Mon Feb 24 2003 Sean Reifschneider <jafo-rpms@tummy.com> [2.3b1-1pydotorg]
- Updating to 2.3b1 release.
* Mon Feb 17 2003 Sean Reifschneider <jafo-rpms@tummy.com> [2.3a1-1]
- Updating to 2.3 release.
* Sun Dec 23 2001 Sean Reifschneider <jafo-rpms@tummy.com>
[Release 2.2-2]
- Added -docs package.
- Added "auto" config_tkinter setting which only enables tk if
/usr/bin/wish exists.
* Sat Dec 22 2001 Sean Reifschneider <jafo-rpms@tummy.com>
[Release 2.2-1]
- Updated to 2.2.
- Changed the extension to "2" from "2.2".
* Tue Nov 18 2001 Sean Reifschneider <jafo-rpms@tummy.com>
[Release 2.2c1-1]
- Updated to 2.2c1.
* Thu Nov 1 2001 Sean Reifschneider <jafo-rpms@tummy.com>
[Release 2.2b1-3]
- Changed the way the sed for fixing the #! in pydoc works.
* Wed Oct 24 2001 Sean Reifschneider <jafo-rpms@tummy.com>
[Release 2.2b1-2]
- Fixed missing "email" package, thanks to anonymous report on sourceforge.
- Fixed missing "compiler" package.
* Mon Oct 22 2001 Sean Reifschneider <jafo-rpms@tummy.com>
[Release 2.2b1-1]
- Updated to 2.2b1.
* Mon Oct 9 2001 Sean Reifschneider <jafo-rpms@tummy.com>
[Release 2.2a4-4]
- otto@balinor.mat.unimi.it mentioned that the license file is missing.
* Sun Sep 30 2001 Sean Reifschneider <jafo-rpms@tummy.com>
[Release 2.2a4-3]
- Ignacio Vazquez-Abrams pointed out that I had a spurious double-quote in
the spec files. Thanks.
* Wed Jul 25 2001 Sean Reifschneider <jafo-rpms@tummy.com>
[Release 2.2a1-1]
- Updated to 2.2a1 release.
- Changed idle and pydoc to use binsuffix macro
#######
# PREP
#######
%prep
%setup -n Python-%{version}
########
# BUILD
########
%build
echo "Setting for ipv6: %{ipv6}"
echo "Setting for pymalloc: %{pymalloc}"
echo "Setting for binsuffix: %{binsuffix}"
echo "Setting for include_tkinter: %{include_tkinter}"
echo "Setting for libdirname: %{libdirname}"
echo "Setting for sharedlib: %{sharedlib}"
echo "Setting for include_sharedlib: %{include_sharedlib}"
./configure --enable-unicode=ucs4 %{sharedlib} %{ipv6} %{pymalloc} --prefix=%{__prefix}
make
##########
# INSTALL
##########
%install
# set the install path
echo '[install_scripts]' >setup.cfg
echo 'install_dir='"${RPM_BUILD_ROOT}%{__prefix}/bin" >>setup.cfg
[ -d "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != "/" ] && rm -rf $RPM_BUILD_ROOT
mkdir -p $RPM_BUILD_ROOT%{__prefix}/%{libdirname}/python%{libvers}/lib-dynload
make prefix=$RPM_BUILD_ROOT%{__prefix} install
# REPLACE PATH IN PYDOC
if [ ! -z "%{binsuffix}" ]
then
(
cd $RPM_BUILD_ROOT%{__prefix}/bin
mv pydoc pydoc.old
sed 's|#!.*|#!%{__prefix}/bin/env python'%{binsuffix}'|' \
pydoc.old >pydoc
chmod 755 pydoc
rm -f pydoc.old
)
fi
# add the binsuffix
if [ ! -z "%{binsuffix}" ]
then
rm -f $RPM_BUILD_ROOT%{__prefix}/bin/python[0-9a-zA-Z]*
( cd $RPM_BUILD_ROOT%{__prefix}/bin;
for file in *; do mv "$file" "$file"%{binsuffix}; done )
( cd $RPM_BUILD_ROOT%{_mandir}/man1; mv python.1 python%{binsuffix}.1 )
fi
########
# Tools
echo '#!%{__prefix}/bin/env python%{binsuffix}' >${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix}
echo 'import os, sys' >>${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix}
echo 'os.execvp("%{__prefix}/bin/python%{binsuffix}", ["%{__prefix}/bin/python%{binsuffix}", "%{__prefix}/lib/python%{libvers}/idlelib/idle.py"] + sys.argv[1:])' >>${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix}
echo 'print "Failed to exec Idle"' >>${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix}
echo 'sys.exit(1)' >>${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix}
chmod 755 $RPM_BUILD_ROOT%{__prefix}/bin/idle%{binsuffix}
cp -a Tools $RPM_BUILD_ROOT%{__prefix}/%{libdirname}/python%{libvers}
# MAKE FILE LISTS
rm -f mainpkg.files
find "$RPM_BUILD_ROOT""%{__prefix}"/%{libdirname}/python%{libvers} -type f |
sed "s|^${RPM_BUILD_ROOT}|/|" |
grep -v -e '/python%{libvers}/config$' -e '_tkinter.so$' >mainpkg.files
find "$RPM_BUILD_ROOT""%{__prefix}"/bin -type f -o -type l |
sed "s|^${RPM_BUILD_ROOT}|/|" |
grep -v -e '/bin/2to3%{binsuffix}$' |
grep -v -e '/bin/pydoc%{binsuffix}$' |
grep -v -e '/bin/smtpd.py%{binsuffix}$' |
grep -v -e '/bin/idle%{binsuffix}$' >>mainpkg.files
rm -f tools.files
find "$RPM_BUILD_ROOT""%{__prefix}"/%{libdirname}/python%{libvers}/idlelib \
"$RPM_BUILD_ROOT""%{__prefix}"/%{libdirname}/python%{libvers}/Tools -type f |
sed "s|^${RPM_BUILD_ROOT}|/|" >tools.files
echo "%{__prefix}"/bin/2to3%{binsuffix} >>tools.files
echo "%{__prefix}"/bin/pydoc%{binsuffix} >>tools.files
echo "%{__prefix}"/bin/smtpd.py%{binsuffix} >>tools.files
echo "%{__prefix}"/bin/idle%{binsuffix} >>tools.files
######
# Docs
%if %{include_docs}
mkdir -p "$RPM_BUILD_ROOT"%{config_htmldir}
(
cd "$RPM_BUILD_ROOT"%{config_htmldir}
bunzip2 < %{SOURCE1} | tar x
)
%endif
# fix the #! line in installed files
find "$RPM_BUILD_ROOT" -type f -print0 |
xargs -0 grep -l /usr/local/bin/python | while read file
do
FIXFILE="$file"
sed 's|^#!.*python|#!%{__prefix}/bin/env python'"%{binsuffix}"'|' \
"$FIXFILE" >/tmp/fix-python-path.$$
cat /tmp/fix-python-path.$$ >"$FIXFILE"
rm -f /tmp/fix-python-path.$$
done
# check to see if there are any straggling #! lines
find "$RPM_BUILD_ROOT" -type f | xargs egrep -n '^#! */usr/local/bin/python' \
| grep ':1:#!' >/tmp/python-rpm-files.$$ || true
if [ -s /tmp/python-rpm-files.$$ ]
then
echo '*****************************************************'
cat /tmp/python-rpm-files.$$
cat <<@EOF
*****************************************************
There are still files referencing /usr/local/bin/python in the
install directory. They are listed above. Please fix the .spec
file and try again. If you are an end-user, you probably want
to report this to jafo-rpms@tummy.com as well.
*****************************************************
@EOF
rm -f /tmp/python-rpm-files.$$
exit 1
fi
rm -f /tmp/python-rpm-files.$$
########
# CLEAN
########
%clean
[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf $RPM_BUILD_ROOT
rm -f mainpkg.files tools.files
########
# FILES
########
%files -f mainpkg.files
%defattr(-,root,root)
%doc Misc/README Misc/cheatsheet Misc/Porting
%doc LICENSE Misc/ACKS Misc/HISTORY Misc/NEWS
%{_mandir}/man1/python%{binsuffix}.1*
%attr(755,root,root) %dir %{__prefix}/include/python%{libvers}
%attr(755,root,root) %dir %{__prefix}/%{libdirname}/python%{libvers}/
%if %{include_sharedlib}
%{__prefix}/%{libdirname}/libpython*
%endif
%files devel
%defattr(-,root,root)
%{__prefix}/include/python%{libvers}/*.h
%{__prefix}/%{libdirname}/python%{libvers}/config
%files -f tools.files tools
%defattr(-,root,root)
%if %{include_tkinter}
%files tkinter
%defattr(-,root,root)
%{__prefix}/%{libdirname}/python%{libvers}/tkinter
%{__prefix}/%{libdirname}/python%{libvers}/lib-dynload/_tkinter.so*
%endif
%if %{include_docs}
%files docs
%defattr(-,root,root)
%{config_htmldir}/*
%endif

View File

@ -4305,8 +4305,11 @@ Array_subscript(PyObject *myself, PyObject *item)
slicelen);
}
dest = (wchar_t *)PyMem_Malloc(
slicelen * sizeof(wchar_t));
dest = PyMem_New(wchar_t, slicelen);
if (dest == NULL) {
PyErr_NoMemory();
return NULL;
}
for (cur = start, i = 0; i < slicelen;
cur += step, i++) {
@ -4986,7 +4989,7 @@ Pointer_subscript(PyObject *myself, PyObject *item)
return PyUnicode_FromWideChar(ptr + start,
len);
}
dest = (wchar_t *)PyMem_Malloc(len * sizeof(wchar_t));
dest = PyMem_New(wchar_t, len);
if (dest == NULL)
return PyErr_NoMemory();
for (cur = start, i = 0; i < len; cur += step, i++) {

View File

@ -76,14 +76,18 @@ PyCStgDict_clone(StgDictObject *dst, StgDictObject *src)
if (src->format) {
dst->format = PyMem_Malloc(strlen(src->format) + 1);
if (dst->format == NULL)
if (dst->format == NULL) {
PyErr_NoMemory();
return -1;
}
strcpy(dst->format, src->format);
}
if (src->shape) {
dst->shape = PyMem_Malloc(sizeof(Py_ssize_t) * src->ndim);
if (dst->shape == NULL)
if (dst->shape == NULL) {
PyErr_NoMemory();
return -1;
}
memcpy(dst->shape, src->shape,
sizeof(Py_ssize_t) * src->ndim);
}
@ -380,7 +384,7 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct
union_size = 0;
total_align = align ? align : 1;
stgdict->ffi_type_pointer.type = FFI_TYPE_STRUCT;
stgdict->ffi_type_pointer.elements = PyMem_Malloc(sizeof(ffi_type *) * (basedict->length + len + 1));
stgdict->ffi_type_pointer.elements = PyMem_New(ffi_type *, basedict->length + len + 1);
if (stgdict->ffi_type_pointer.elements == NULL) {
PyErr_NoMemory();
return -1;
@ -398,7 +402,7 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct
union_size = 0;
total_align = 1;
stgdict->ffi_type_pointer.type = FFI_TYPE_STRUCT;
stgdict->ffi_type_pointer.elements = PyMem_Malloc(sizeof(ffi_type *) * (len + 1));
stgdict->ffi_type_pointer.elements = PyMem_New(ffi_type *, len + 1);
if (stgdict->ffi_type_pointer.elements == NULL) {
PyErr_NoMemory();
return -1;
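
The stgdict hunks also add the PyErr_NoMemory() call that was missing before the error return; a C function that reports failure without leaving an exception set makes the interpreter raise a confusing SystemError ("error return without exception set") instead of MemoryError. A hedged sketch of the protocol, with a hypothetical dup_format() helper:

    #include <Python.h>
    #include <string.h>

    /* Hypothetical helper, for illustration only. */
    static char *
    dup_format(const char *src)
    {
        char *dst = PyMem_Malloc(strlen(src) + 1);
        if (dst == NULL) {
            PyErr_NoMemory();   /* set the exception *before* failing */
            return NULL;        /* callers translate NULL into -1 */
        }
        strcpy(dst, src);
        return dst;
    }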

View File

@ -126,11 +126,18 @@ internal_close(fileio *self)
static PyObject *
fileio_close(fileio *self)
{
PyObject *res;
PyObject *exc, *val, *tb;
int rc;
_Py_IDENTIFIER(close);
res = _PyObject_CallMethodId((PyObject*)&PyRawIOBase_Type,
&PyId_close, "O", self);
if (!self->closefd) {
self->fd = -1;
Py_RETURN_NONE;
return res;
}
if (res == NULL)
PyErr_Fetch(&exc, &val, &tb);
if (self->finalizing) {
PyObject *r = fileio_dealloc_warn(self, (PyObject *) self);
if (r)
@ -138,12 +145,12 @@ fileio_close(fileio *self)
else
PyErr_Clear();
}
errno = internal_close(self);
if (errno < 0)
return NULL;
return _PyObject_CallMethodId((PyObject*)&PyRawIOBase_Type,
&PyId_close, "O", self);
rc = internal_close(self);
if (res == NULL)
_PyErr_ChainExceptions(exc, val, tb);
if (rc < 0)
Py_CLEAR(res);
return res;
}
static PyObject *
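
The fileio_close rewrite is the save-then-chain pattern: call the base-class close first, stash any exception with PyErr_Fetch() so the file descriptor still gets closed, then restore or chain it with _PyErr_ChainExceptions(). A minimal sketch, with a hypothetical do_cleanup() standing in for internal_close():

    #include <Python.h>

    /* Hypothetical cleanup step; a real one sets an exception on failure. */
    static int do_cleanup(void) { return 0; }

    static PyObject *
    close_with_chaining(PyObject *obj)
    {
        PyObject *exc, *val, *tb;
        PyObject *res;
        int rc;

        res = PyObject_CallMethod(obj, "flush", NULL);
        if (res == NULL)
            PyErr_Fetch(&exc, &val, &tb);      /* stash the first error */

        rc = do_cleanup();                     /* always executed */

        if (res == NULL)
            _PyErr_ChainExceptions(exc, val, tb);  /* restore or chain it */
        if (rc < 0)
            Py_CLEAR(res);                     /* cleanup failed: report that */
        return res;
    }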

View File

@ -254,7 +254,7 @@ PyLocale_strxfrm(PyObject* self, PyObject* args)
/* assume no change in size, first */
n1 = n1 + 1;
buf = PyMem_Malloc(n1 * sizeof(wchar_t));
buf = PyMem_New(wchar_t, n1);
if (!buf) {
PyErr_NoMemory();
goto exit;

View File

@ -3838,10 +3838,11 @@ static int _setup_ssl_threads(void) {
if (_ssl_locks == NULL) {
_ssl_locks_count = CRYPTO_num_locks();
_ssl_locks = (PyThread_type_lock *)
PyMem_Malloc(sizeof(PyThread_type_lock) * _ssl_locks_count);
if (_ssl_locks == NULL)
_ssl_locks = PyMem_New(PyThread_type_lock, _ssl_locks_count);
if (_ssl_locks == NULL) {
PyErr_NoMemory();
return 0;
}
memset(_ssl_locks, 0,
sizeof(PyThread_type_lock) * _ssl_locks_count);
for (i = 0; i < _ssl_locks_count; i++) {

View File

@ -850,7 +850,7 @@ seq_as_ssize_array(PyObject *seq, Py_ssize_t len, int is_shape)
Py_ssize_t *dest;
Py_ssize_t x, i;
dest = PyMem_Malloc(len * (sizeof *dest));
dest = PyMem_New(Py_ssize_t, len);
if (dest == NULL) {
PyErr_NoMemory();
return NULL;

View File

@ -1516,7 +1516,7 @@ unicode_aswidechar(PyObject *self, PyObject *args)
if (!PyArg_ParseTuple(args, "Un", &unicode, &buflen))
return NULL;
buffer = PyMem_Malloc(buflen * sizeof(wchar_t));
buffer = PyMem_New(wchar_t, buflen);
if (buffer == NULL)
return PyErr_NoMemory();

View File

@ -34,7 +34,7 @@ Copyright (C) 1994 Steen Lumholt.
#endif
#define CHECK_SIZE(size, elemsize) \
((size_t)(size) <= Py_MAX((size_t)INT_MAX, UINT_MAX / (size_t)(elemsize)))
((size_t)(size) <= Py_MIN((size_t)INT_MAX, UINT_MAX / (size_t)(elemsize)))
/* Starting with Tcl 8.4, many APIs offer const-correctness. Unfortunately,
making _tkinter correct for this API means to break earlier
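
The _tkinter change is a one-word logic fix: a requested size must satisfy both limits at once, the INT_MAX cap (Tcl lengths are C ints) and the UINT_MAX / elemsize multiplication cap, so the guard must compare against the smaller bound, not the larger. A small self-contained check, assuming 32-bit int and unsigned int:

    #include <limits.h>
    #include <stddef.h>
    #include <stdio.h>

    #define MIN_(a, b) ((a) < (b) ? (a) : (b))
    #define MAX_(a, b) ((a) > (b) ? (a) : (b))
    #define CHECK_SIZE_FIXED(size, elemsize) \
        ((size_t)(size) <= MIN_((size_t)INT_MAX, UINT_MAX / (size_t)(elemsize)))
    #define CHECK_SIZE_BROKEN(size, elemsize) \
        ((size_t)(size) <= MAX_((size_t)INT_MAX, UINT_MAX / (size_t)(elemsize)))

    int main(void)
    {
        size_t size = 3000000000u;   /* representable, but > INT_MAX */
        /* With elemsize 1 the broken guard accepts a length that cannot
         * be passed to Tcl as an int; the fixed guard rejects it. */
        printf("broken=%d fixed=%d\n",
               CHECK_SIZE_BROKEN(size, 1), CHECK_SIZE_FIXED(size, 1));
        return 0;   /* prints: broken=1 fixed=0 */
    }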

View File

@ -535,13 +535,23 @@ getenvironment(PyObject* environment)
"environment can only contain strings");
goto error;
}
if (totalsize > PY_SSIZE_T_MAX - PyUnicode_GET_LENGTH(key) - 1) {
PyErr_SetString(PyExc_OverflowError, "environment too long");
goto error;
}
totalsize += PyUnicode_GET_LENGTH(key) + 1; /* +1 for '=' */
if (totalsize > PY_SSIZE_T_MAX - PyUnicode_GET_LENGTH(value) - 1) {
PyErr_SetString(PyExc_OverflowError, "environment too long");
goto error;
}
totalsize += PyUnicode_GET_LENGTH(value) + 1; /* +1 for '\0' */
}
buffer = PyMem_Malloc(totalsize * sizeof(Py_UCS4));
if (! buffer)
buffer = PyMem_NEW(Py_UCS4, totalsize);
if (! buffer) {
PyErr_NoMemory();
goto error;
}
p = buffer;
end = buffer + totalsize;
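
getenvironment() now guards each addition before performing it: once a signed Py_ssize_t has overflowed, the behaviour is already undefined and cannot be detected after the fact. A sketch of the guard, assuming extra is non-negative (a string length plus one always is):

    #include <Python.h>

    /* Sketch, not the actual helper: fail before *total can wrap. */
    static int
    add_size(Py_ssize_t *total, Py_ssize_t extra)
    {
        if (*total > PY_SSIZE_T_MAX - extra) {
            PyErr_SetString(PyExc_OverflowError, "environment too long");
            return -1;
        }
        *total += extra;
        return 0;
    }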

View File

@ -228,13 +228,13 @@ ascii_buffer_converter(PyObject *arg, Py_buffer *buf)
if (PyObject_GetBuffer(arg, buf, PyBUF_SIMPLE) != 0) {
PyErr_Format(PyExc_TypeError,
"argument should be bytes, buffer or ASCII string, "
"not %R", Py_TYPE(arg));
"not '%.100s'", Py_TYPE(arg)->tp_name);
return 0;
}
if (!PyBuffer_IsContiguous(buf, 'C')) {
PyErr_Format(PyExc_TypeError,
"argument should be a contiguous buffer, "
"not %R", Py_TYPE(arg));
"not '%.100s'", Py_TYPE(arg)->tp_name);
PyBuffer_Release(buf);
return 0;
}

View File

@ -182,8 +182,10 @@ expand_encodebuffer(MultibyteEncodeBuffer *buf, Py_ssize_t esize)
orgsize = PyBytes_GET_SIZE(buf->outobj);
incsize = (esize < (orgsize >> 1) ? (orgsize >> 1) | 1 : esize);
if (orgsize > PY_SSIZE_T_MAX - incsize)
if (orgsize > PY_SSIZE_T_MAX - incsize) {
PyErr_NoMemory();
return -1;
}
if (_PyBytes_Resize(&buf->outobj, orgsize + incsize) == -1)
return -1;
@ -194,11 +196,11 @@ expand_encodebuffer(MultibyteEncodeBuffer *buf, Py_ssize_t esize)
return 0;
}
#define REQUIRE_ENCODEBUFFER(buf, s) { \
if ((s) < 1 || (buf)->outbuf + (s) > (buf)->outbuf_end) \
#define REQUIRE_ENCODEBUFFER(buf, s) do { \
if ((s) < 0 || (s) > (buf)->outbuf_end - (buf)->outbuf) \
if (expand_encodebuffer(buf, s) == -1) \
goto errorexit; \
}
} while(0)
/**
@ -332,10 +334,11 @@ multibytecodec_encerror(MultibyteCodec *codec,
assert(PyBytes_Check(retstr));
retstrsize = PyBytes_GET_SIZE(retstr);
REQUIRE_ENCODEBUFFER(buf, retstrsize);
memcpy(buf->outbuf, PyBytes_AS_STRING(retstr), retstrsize);
buf->outbuf += retstrsize;
if (retstrsize > 0) {
REQUIRE_ENCODEBUFFER(buf, retstrsize);
memcpy(buf->outbuf, PyBytes_AS_STRING(retstr), retstrsize);
buf->outbuf += retstrsize;
}
newpos = PyLong_AsSsize_t(PyTuple_GET_ITEM(retobj, 1));
if (newpos < 0 && !PyErr_Occurred())
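
Two fixes land in one macro here: wrapping the body in do { ... } while (0) so the macro parses as a single statement even in an unbraced if/else, and testing s against the remaining space (outbuf_end - outbuf) instead of forming outbuf + s, which can be an out-of-range pointer and therefore undefined behaviour even if never dereferenced. A sketch with a hypothetical buffer type and grow_buffer() helper:

    #include <stddef.h>

    /* Hypothetical buffer and helper, for illustration only. */
    typedef struct { char *outbuf, *outbuf_end; } encbuf_t;
    static int grow_buffer(encbuf_t *b, ptrdiff_t need);

    /* One statement to the parser; no out-of-range pointer is formed. */
    #define REQUIRE_SPACE(b, n) do {                              \
            if ((n) < 0 || (n) > (b)->outbuf_end - (b)->outbuf)   \
                if (grow_buffer((b), (n)) == -1)                  \
                    goto errorexit;                               \
        } while (0)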

View File

@ -911,12 +911,12 @@ faulthandler_fatal_error_py(PyObject *self, PyObject *args)
}
#if defined(HAVE_SIGALTSTACK) && defined(HAVE_SIGACTION)
static void*
stack_overflow(void *min_sp, void *max_sp, size_t *depth)
static Py_uintptr_t
stack_overflow(Py_uintptr_t min_sp, Py_uintptr_t max_sp, size_t *depth)
{
/* allocate 4096 bytes on the stack at each call */
unsigned char buffer[4096];
void *sp = &buffer;
Py_uintptr_t sp = (Py_uintptr_t)&buffer;
*depth += 1;
if (sp < min_sp || max_sp < sp)
return sp;
@ -929,7 +929,8 @@ static PyObject *
faulthandler_stack_overflow(PyObject *self)
{
size_t depth, size;
char *sp = (char *)&depth, *stop;
Py_uintptr_t sp = (Py_uintptr_t)&depth;
Py_uintptr_t stop;
depth = 0;
stop = stack_overflow(sp - STACK_OVERFLOW_MAX_SIZE,
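
The faulthandler change swaps raw pointers for Py_uintptr_t because relationally comparing pointers that do not point into the same object is undefined in C; converting to an unsigned integer first makes the stack-range check well defined. A sketch using the standard uintptr_t:

    #include <stdint.h>

    /* uintptr_t stands in for Py_uintptr_t here. */
    static int
    sp_in_range(const void *p, uintptr_t lo, uintptr_t hi)
    {
        uintptr_t sp = (uintptr_t)p;   /* integer compare: well defined */
        return lo <= sp && sp <= hi;
    }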

View File

@ -735,7 +735,7 @@ calculate_path(void)
bufsz += wcslen(zip_path) + 1;
bufsz += wcslen(exec_prefix) + 1;
buf = (wchar_t *)PyMem_Malloc(bufsz * sizeof(wchar_t));
buf = PyMem_New(wchar_t, bufsz);
if (buf == NULL) {
Py_FatalError(
"Not enough memory for dynamic PYTHONPATH");

View File

@ -1638,7 +1638,7 @@ get_target_path(HANDLE hdl, wchar_t **target_path)
if(!buf_size)
return FALSE;
buf = (wchar_t *)PyMem_Malloc((buf_size+1)*sizeof(wchar_t));
buf = PyMem_New(wchar_t, buf_size+1);
if (!buf) {
SetLastError(ERROR_OUTOFMEMORY);
return FALSE;
@ -3627,7 +3627,7 @@ _listdir_windows_no_opendir(path_t *path, PyObject *list)
len = wcslen(path->wide);
}
/* The +5 is so we can append "\\*.*\0" */
wnamebuf = PyMem_Malloc((len + 5) * sizeof(wchar_t));
wnamebuf = PyMem_New(wchar_t, len + 5);
if (!wnamebuf) {
PyErr_NoMemory();
goto exit;
@ -3917,7 +3917,7 @@ posix__getfullpathname(PyObject *self, PyObject *args)
Py_ARRAY_LENGTH(woutbuf),
woutbuf, &wtemp);
if (result > Py_ARRAY_LENGTH(woutbuf)) {
woutbufp = PyMem_Malloc(result * sizeof(wchar_t));
woutbufp = PyMem_New(wchar_t, result);
if (!woutbufp)
return PyErr_NoMemory();
result = GetFullPathNameW(wpath, result, woutbufp, &wtemp);
@ -3997,7 +3997,7 @@ posix__getfinalpathname(PyObject *self, PyObject *args)
if(!buf_size)
return win32_error_object("GetFinalPathNameByHandle", po);
target_path = (wchar_t *)PyMem_Malloc((buf_size+1)*sizeof(wchar_t));
target_path = PyMem_New(wchar_t, buf_size+1);
if(!target_path)
return PyErr_NoMemory();
@ -4082,7 +4082,7 @@ posix__getvolumepathname(PyObject *self, PyObject *args)
return NULL;
}
mountpath = (wchar_t *)PyMem_Malloc(buflen * sizeof(wchar_t));
mountpath = PyMem_New(wchar_t, buflen);
if (mountpath == NULL)
return PyErr_NoMemory();
@ -6213,9 +6213,9 @@ posix_getgrouplist(PyObject *self, PyObject *args)
#endif
#ifdef __APPLE__
groups = PyMem_Malloc(ngroups * sizeof(int));
groups = PyMem_New(int, ngroups);
#else
groups = PyMem_Malloc(ngroups * sizeof(gid_t));
groups = PyMem_New(gid_t, ngroups);
#endif
if (groups == NULL)
return PyErr_NoMemory();
@ -6293,7 +6293,7 @@ posix_getgroups(PyObject *self, PyObject *noargs)
/* groups will fit in existing array */
alt_grouplist = grouplist;
} else {
alt_grouplist = PyMem_Malloc(n * sizeof(gid_t));
alt_grouplist = PyMem_New(gid_t, n);
if (alt_grouplist == NULL) {
errno = EINVAL;
return posix_error();
@ -6319,7 +6319,7 @@ posix_getgroups(PyObject *self, PyObject *noargs)
/* Avoid malloc(0) */
alt_grouplist = grouplist;
} else {
alt_grouplist = PyMem_Malloc(n * sizeof(gid_t));
alt_grouplist = PyMem_New(gid_t, n);
if (alt_grouplist == NULL) {
errno = EINVAL;
return posix_error();

View File

@ -928,7 +928,7 @@ xmlparse_ExternalEntityParserCreate(xmlparseobject *self, PyObject *args)
for (i = 0; handler_info[i].name != NULL; i++)
/* do nothing */;
new_parser->handlers = PyMem_Malloc(sizeof(PyObject *) * i);
new_parser->handlers = PyMem_New(PyObject *, i);
if (!new_parser->handlers) {
Py_DECREF(new_parser);
return PyErr_NoMemory();
@ -1121,7 +1121,7 @@ newxmlparseobject(char *encoding, char *namespace_separator, PyObject *intern)
for (i = 0; handler_info[i].name != NULL; i++)
/* do nothing */;
self->handlers = PyMem_Malloc(sizeof(PyObject *) * i);
self->handlers = PyMem_New(PyObject *, i);
if (!self->handlers) {
Py_DECREF(self);
return PyErr_NoMemory();

View File

@ -4126,9 +4126,11 @@ socket_gethostname(PyObject *self, PyObject *unused)
/* MSDN says ERROR_MORE_DATA may occur because DNS allows longer
names */
name = PyMem_Malloc(size * sizeof(wchar_t));
if (!name)
name = PyMem_New(wchar_t, size);
if (!name) {
PyErr_NoMemory();
return NULL;
}
if (!GetComputerNameExW(ComputerNamePhysicalDnsHostname,
name,
&size))

View File

@ -556,7 +556,7 @@ nfd_nfkd(PyObject *self, PyObject *input, int k)
/* Overallocate at most 10 characters. */
space = (isize > 10 ? 10 : isize) + isize;
osize = space;
output = PyMem_Malloc(space * sizeof(Py_UCS4));
output = PyMem_New(Py_UCS4, space);
if (!output) {
PyErr_NoMemory();
return NULL;
@ -703,7 +703,7 @@ nfc_nfkc(PyObject *self, PyObject *input, int k)
/* We allocate a buffer for the output.
If we find that we made no changes, we still return
the NFD result. */
output = PyMem_Malloc(len * sizeof(Py_UCS4));
output = PyMem_New(Py_UCS4, len);
if (!output) {
PyErr_NoMemory();
Py_DECREF(result);

View File

@ -233,7 +233,7 @@ make_filename(PyObject *prefix, PyObject *name)
Py_ssize_t len;
len = PyUnicode_GET_LENGTH(prefix) + PyUnicode_GET_LENGTH(name) + 1;
p = buf = PyMem_Malloc(sizeof(Py_UCS4) * len);
p = buf = PyMem_New(Py_UCS4, len);
if (buf == NULL) {
PyErr_NoMemory();
return NULL;

View File

@ -1832,7 +1832,8 @@ merge_collapse(MergeState *ms)
assert(ms);
while (ms->n > 1) {
Py_ssize_t n = ms->n - 2;
if (n > 0 && p[n-1].len <= p[n].len + p[n+1].len) {
if ((n > 0 && p[n-1].len <= p[n].len + p[n+1].len) ||
(n > 1 && p[n-2].len <= p[n-1].len + p[n].len)) {
if (p[n-1].len < p[n+1].len)
--n;
if (merge_at(ms, n) < 0)
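
This is the listsort stack-invariant fix: merge_collapse() must restore len[i] > len[i+1] + len[i+2] for every run on the pending stack, but a merge can re-break the invariant one slot below the top, so the condition now also inspects p[n-2]. A debug-style checker for the intended invariant (a sketch, not CPython code):

    #include <assert.h>
    #include <stddef.h>

    /* Intended invariant over the whole run stack, not just its top:
     *   len[i] > len[i+1] + len[i+2]   and   len[i] > len[i+1] */
    static void
    assert_run_invariant(const size_t *len, size_t n)
    {
        size_t i;
        for (i = 0; i + 2 < n; i++)
            assert(len[i] > len[i + 1] + len[i + 2]);
        for (i = 0; i + 1 < n; i++)
            assert(len[i] > len[i + 1]);
    }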

View File

@ -1535,6 +1535,10 @@ _PyUnicode_Ready(PyObject *unicode)
/* in case the native representation is 2-bytes, we need to allocate a
new normalized 4-byte version. */
length_wo_surrogates = _PyUnicode_WSTR_LENGTH(unicode) - num_surrogates;
if (length_wo_surrogates > PY_SSIZE_T_MAX / 4 - 1) {
PyErr_NoMemory();
return -1;
}
_PyUnicode_DATA_ANY(unicode) = PyObject_MALLOC(4 * (length_wo_surrogates + 1));
if (!_PyUnicode_DATA_ANY(unicode)) {
PyErr_NoMemory();
@ -2186,7 +2190,7 @@ _PyUnicode_AsKind(PyObject *s, unsigned int kind)
}
switch (kind) {
case PyUnicode_2BYTE_KIND:
result = PyMem_Malloc(len * sizeof(Py_UCS2));
result = PyMem_New(Py_UCS2, len);
if (!result)
return PyErr_NoMemory();
assert(skind == PyUnicode_1BYTE_KIND);
@ -2197,7 +2201,7 @@ _PyUnicode_AsKind(PyObject *s, unsigned int kind)
result);
return result;
case PyUnicode_4BYTE_KIND:
result = PyMem_Malloc(len * sizeof(Py_UCS4));
result = PyMem_New(Py_UCS4, len);
if (!result)
return PyErr_NoMemory();
if (skind == PyUnicode_2BYTE_KIND) {
@ -2239,11 +2243,7 @@ as_ucs4(PyObject *string, Py_UCS4 *target, Py_ssize_t targetsize,
if (copy_null)
targetlen++;
if (!target) {
if (PY_SSIZE_T_MAX / sizeof(Py_UCS4) < targetlen) {
PyErr_NoMemory();
return NULL;
}
target = PyMem_Malloc(targetlen * sizeof(Py_UCS4));
target = PyMem_New(Py_UCS4, targetlen);
if (!target) {
PyErr_NoMemory();
return NULL;
@ -2852,12 +2852,7 @@ PyUnicode_AsWideCharString(PyObject *unicode,
buflen = unicode_aswidechar(unicode, NULL, 0);
if (buflen == -1)
return NULL;
if (PY_SSIZE_T_MAX / sizeof(wchar_t) < buflen) {
PyErr_NoMemory();
return NULL;
}
buffer = PyMem_MALLOC(buflen * sizeof(wchar_t));
buffer = PyMem_NEW(wchar_t, buflen);
if (buffer == NULL) {
PyErr_NoMemory();
return NULL;
@ -3550,10 +3545,7 @@ PyUnicode_DecodeLocaleAndSize(const char *str, Py_ssize_t len,
wstr = smallbuf;
}
else {
if (wlen > PY_SSIZE_T_MAX / sizeof(wchar_t) - 1)
return PyErr_NoMemory();
wstr = PyMem_Malloc((wlen+1) * sizeof(wchar_t));
wstr = PyMem_New(wchar_t, wlen+1);
if (!wstr)
return PyErr_NoMemory();
}
@ -3858,6 +3850,11 @@ PyUnicode_AsUnicodeAndSize(PyObject *unicode, Py_ssize_t *size)
#endif
}
else {
if ((size_t)_PyUnicode_LENGTH(unicode) >
PY_SSIZE_T_MAX / sizeof(wchar_t) - 1) {
PyErr_NoMemory();
return NULL;
}
_PyUnicode_WSTR(unicode) = (wchar_t *) PyObject_MALLOC(sizeof(wchar_t) *
(_PyUnicode_LENGTH(unicode) + 1));
if (!_PyUnicode_WSTR(unicode)) {

View File

@ -939,7 +939,7 @@ Reg2Py(BYTE *retDataBuf, DWORD retDataSize, DWORD typ)
wchar_t *data = (wchar_t *)retDataBuf;
int len = retDataSize / 2;
int s = countStrings(data, len);
wchar_t **str = (wchar_t **)PyMem_Malloc(sizeof(wchar_t *)*s);
wchar_t **str = PyMem_New(wchar_t *, s);
if (str == NULL)
return PyErr_NoMemory();
@ -1206,7 +1206,7 @@ PyEnumValue(PyObject *self, PyObject *args)
++retDataSize;
bufDataSize = retDataSize;
bufValueSize = retValueSize;
retValueBuf = (wchar_t *)PyMem_Malloc(sizeof(wchar_t) * retValueSize);
retValueBuf = PyMem_New(wchar_t, retValueSize);
if (retValueBuf == NULL)
return PyErr_NoMemory();
retDataBuf = (BYTE *)PyMem_Malloc(retDataSize);
@ -1277,7 +1277,7 @@ PyExpandEnvironmentStrings(PyObject *self, PyObject *args)
return PyErr_SetFromWindowsErrWithFunction(retValueSize,
"ExpandEnvironmentStrings");
}
retValue = (wchar_t *)PyMem_Malloc(retValueSize * sizeof(wchar_t));
retValue = PyMem_New(wchar_t, retValueSize);
if (retValue == NULL) {
return PyErr_NoMemory();
}

View File

@ -24,11 +24,13 @@ Py_FrozenMain(int argc, char **argv)
/* We need a second copies, as Python might modify the first one. */
wchar_t **argv_copy2 = NULL;
argv_copy = PyMem_RawMalloc(sizeof(wchar_t*) * argc);
argv_copy2 = PyMem_RawMalloc(sizeof(wchar_t*) * argc);
if (!argv_copy || !argv_copy2) {
fprintf(stderr, "out of memory\n");
goto error;
if (argc > 0) {
argv_copy = PyMem_RawMalloc(sizeof(wchar_t*) * argc);
argv_copy2 = PyMem_RawMalloc(sizeof(wchar_t*) * argc);
if (!argv_copy || !argv_copy2) {
fprintf(stderr, "out of memory\n");
goto error;
}
}
Py_FrozenFlag = 1; /* Suppress errors from getpath.c */
@ -68,7 +70,8 @@ Py_FrozenMain(int argc, char **argv)
#ifdef MS_WINDOWS
PyInitFrozenExtensions();
#endif /* MS_WINDOWS */
Py_SetProgramName(argv_copy[0]);
if (argc >= 1)
Py_SetProgramName(argv_copy[0]);
Py_Initialize();
#ifdef MS_WINDOWS
PyWinFreeze_ExeInit();
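
Py_FrozenMain now tolerates argc == 0, which an unusual or hostile exec() can produce: the argv copies are only allocated, and argv[0] only read, when there is at least one argument. A minimal sketch of the same guard outside CPython:

    #include <stdio.h>
    #include <stdlib.h>
    #include <wchar.h>

    int main(int argc, char **argv)
    {
        wchar_t **argv_copy = NULL;
        if (argc > 0) {                 /* skip the zero-byte allocation */
            argv_copy = malloc(sizeof(wchar_t *) * argc);
            if (argv_copy == NULL) {
                fprintf(stderr, "out of memory\n");
                return 1;
            }
        }
        /* ... read argv_copy[0] only when argc >= 1 ... */
        free(argv_copy);
        return 0;
    }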

View File

@ -290,7 +290,7 @@ fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts, PyObject *v
static unsigned int *
markblocks(unsigned char *code, Py_ssize_t len)
{
unsigned int *blocks = (unsigned int *)PyMem_Malloc(len*sizeof(int));
unsigned int *blocks = PyMem_New(unsigned int, len);
int i,j, opcode, blockcnt = 0;
if (blocks == NULL) {
@ -398,7 +398,7 @@ PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names,
goto exitUnchanged;
/* Mapping to new jump targets after NOPs are removed */
addrmap = (int *)PyMem_Malloc(codelen * sizeof(int));
addrmap = PyMem_New(int, codelen);
if (addrmap == NULL) {
PyErr_NoMemory();
goto exitError;

configure (vendored)
View File

@ -6283,7 +6283,11 @@ then
if test "$Py_DEBUG" = 'true' ; then
# Optimization messes up debuggers, so turn it off for
# debug builds.
OPT="-g -O0 -Wall $STRICT_PROTO"
if "$CC" -v --help 2>/dev/null |grep -- -Og > /dev/null; then
OPT="-g -Og -Wall $STRICT_PROTO"
else
OPT="-g -O0 -Wall $STRICT_PROTO"
fi
else
OPT="-g $WRAP -O3 -Wall $STRICT_PROTO"
fi

View File

@ -1119,7 +1119,11 @@ then
if test "$Py_DEBUG" = 'true' ; then
# Optimization messes up debuggers, so turn it off for
# debug builds.
OPT="-g -O0 -Wall $STRICT_PROTO"
if "$CC" -v --help 2>/dev/null |grep -- -Og > /dev/null; then
OPT="-g -Og -Wall $STRICT_PROTO"
else
OPT="-g -O0 -Wall $STRICT_PROTO"
fi
else
OPT="-g $WRAP -O3 -Wall $STRICT_PROTO"
fi