bpo-32746: Fix multiple typos (GH-5144)
Fix typos found by codespell in docs, docstrings, and comments.
Parent: 589c718a8e
Commit: c3d9508ff2
@@ -567,7 +567,7 @@ An option group is obtained using the class :class:`OptionGroup`:
 where

-* parser is the :class:`OptionParser` instance the group will be insterted in
+* parser is the :class:`OptionParser` instance the group will be inserted in
   to
 * title is the group title
 * description, optional, is a long description of the group

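To make the parser/title/description arguments described above concrete, here is a minimal, hedged sketch of how an option group is typically created and attached; the option names are illustrative, not taken from the patched file:

    from optparse import OptionParser, OptionGroup

    parser = OptionParser()
    group = OptionGroup(parser, "Dangerous Options",
                        "Caution: use these options at your own risk.")
    group.add_option("--risky", action="store_true", help="enable risky mode")
    parser.add_option_group(group)
    parser.print_help()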
@@ -14,7 +14,7 @@ the standard audio interface for Linux and recent versions of FreeBSD.
 .. Things will get more complicated for future Linux versions, since
    ALSA is in the standard kernel as of 2.5.x. Presumably if you
    use ALSA, you'll have to make sure its OSS compatibility layer
-   is active to use ossaudiodev, but you're gonna need it for the vast
+   is active to use ossaudiodev, but you're going to need it for the vast
    majority of Linux audio apps anyway.

    Sounds like things are also complicated for other BSDs. In response
@@ -447,4 +447,3 @@ The remaining methods are specific to audio mixing:
 microphone input::

    mixer.setrecsrc (1 << ossaudiodev.SOUND_MIXER_MIC)
-
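For context, a hedged sketch around the mixer call shown in this hunk; it only works on a system that actually provides OSS (or its compatibility layer), and the default mixer device may differ:

    import ossaudiodev

    mixer = ossaudiodev.openmixer()      # opens the default mixer device
    try:
        # Select the microphone as the recording source, as in the doc example.
        mixer.setrecsrc(1 << ossaudiodev.SOUND_MIXER_MIC)
    finally:
        mixer.close()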
@@ -32,7 +32,7 @@ as multiple shared libraries can be in the archive - even sharing the same name.
 In documentation the archive is also referred to as the "base" and the shared
 library object is referred to as the "member".

-For dlopen() on AIX (read initAndLoad()) the calls are similiar.
+For dlopen() on AIX (read initAndLoad()) the calls are similar.
 Default activity occurs when no path information is provided. When path
 information is provided dlopen() does not search any other directories.
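The archive/member naming this docstring describes can be seen from Python. This is an illustrative sketch only; the exact member name (for example shr_64.o) depends on the AIX installation:

    from ctypes import CDLL
    from ctypes.util import find_library

    name = find_library("c")   # on AIX this may resolve to something like "libc.a(shr_64.o)"
    libc = CDLL(name)          # dlopen() receives the base archive plus the member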
@@ -90,7 +90,7 @@ def get_ld_header_info(p):
         if re.match("[0-9]", line):
             info.append(line)
         else:
-            # blank line (seperator), consume line and end for loop
+            # blank line (separator), consume line and end for loop
             break
     return info

@@ -2745,8 +2745,8 @@ def _fold_mime_parameters(part, lines, maxlen, encoding):

     Using the decoded list of parameters and values, format them according to
     the RFC rules, including using RFC2231 encoding if the value cannot be
-    expressed in 'encoding' and/or the paramter+value is too long to fit within
-    'maxlen'.
+    expressed in 'encoding' and/or the parameter+value is too long to fit
+    within 'maxlen'.

     """
     # Special case for RFC2231 encoding: start from decoded values and use
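A small, hedged illustration of the folding behaviour the docstring describes: when a parameter value is too long for the line, the default policy splits or RFC2231-encodes it as the message is serialized. The filename below is made up:

    from email.message import EmailMessage

    msg = EmailMessage()
    msg["Content-Disposition"] = "attachment"
    msg.set_param("filename", "a-very-long-name-" * 8 + ".txt",
                  header="Content-Disposition")
    print(msg.as_string())   # the long filename parameter is folded over several lines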
@@ -495,7 +495,7 @@ class FontPage(Frame):
         Changing any of the font vars invokes var_changed_font, which
         adds all 3 font options to changes and calls set_samples.
         Set_samples applies a new font constructed from the font vars to
-        font_sample and to highlight_sample on the hightlight page.
+        font_sample and to highlight_sample on the highlight page.

         Tabs: Enable users to change spaces entered for indent tabs.
         Changing indent_scale value with the mouse sets Var space_num,
@@ -646,7 +646,7 @@ class FontPage(Frame):

         Called on font initialization and change events.
         Accesses font_name, font_size, and font_bold Variables.
-        Updates font_sample and hightlight page highlight_sample.
+        Updates font_sample and highlight page highlight_sample.
         """
         font_name = self.font_name.get()
         font_weight = tkFont.BOLD if self.font_bold.get() else tkFont.NORMAL
@@ -142,7 +142,7 @@ name_op('LOAD_NAME', 101)       # Index in name list
 def_op('BUILD_TUPLE', 102)      # Number of tuple items
 def_op('BUILD_LIST', 103)       # Number of list items
 def_op('BUILD_SET', 104)        # Number of set items
-def_op('BUILD_MAP', 105)        # Number of dict entries (upto 255)
+def_op('BUILD_MAP', 105)        # Number of dict entries
 name_op('LOAD_ATTR', 106)       # Index in name list
 def_op('COMPARE_OP', 107)       # Comparison operator
 hascompare.append(107)
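The corrected comment (no 255-entry cap) is easy to check with dis. The keys below are plain names rather than constants so that the dict display compiles to BUILD_MAP even on versions that optimize all-constant keys into a different opcode; a quick sketch, output trimmed:

    import dis

    # The oparg of BUILD_MAP is the number of key/value pairs pushed (3 here).
    dis.dis(compile("{x: 1, y: 2, z: 3}", "<demo>", "eval"))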
@@ -1861,7 +1861,7 @@ class TestDateTime(TestDate):

         # Make sure comparison doesn't forget microseconds, and isn't done
         # via comparing a float timestamp (an IEEE double doesn't have enough
-        # precision to span microsecond resolution across years 1 thru 9999,
+        # precision to span microsecond resolution across years 1 through 9999,
         # so comparing via timestamp necessarily calls some distinct values
         # equal).
         dt1 = self.theclass(MAXYEAR, 12, 31, 23, 59, 59, 999998)
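A quick, self-contained check of the claim in that comment: near year 9999 a POSIX timestamp is roughly 2.5e11 seconds, and an IEEE double cannot represent a one-microsecond step at that magnitude. The constant below is only an approximation of such a timestamp:

    ts = 253_000_000_000.0    # roughly a year-9999 date expressed in seconds
    print(ts + 1e-6 == ts)    # True: the added microsecond is rounded away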
@@ -51,7 +51,7 @@ class samplecmdclass(cmd.Cmd):

     Test for the function completedefault():
     >>> mycmd.completedefault()
-    This is the completedefault methode
+    This is the completedefault method
     >>> mycmd.completenames("a")
     ['add']

@@ -140,7 +140,7 @@ class samplecmdclass(cmd.Cmd):
         print("Hello from postloop")

     def completedefault(self, *ignored):
-        print("This is the completedefault methode")
+        print("This is the completedefault method")

     def complete_command(self):
         print("complete command")
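For reference, completedefault() is the hook cmd.Cmd calls to complete arguments of commands that have no dedicated complete_* method. A minimal sketch; the command and item names are invented:

    import cmd

    class ShoppingCmd(cmd.Cmd):
        items = ["apple", "apricot", "banana"]

        def do_add(self, arg):
            print("adding", arg)

        def completedefault(self, text, line, begidx, endidx):
            # Fall-back completion used because no complete_add() is defined.
            return [name for name in self.items if name.startswith(text)]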
@@ -398,7 +398,7 @@ class ProcessPoolShutdownTest(ExecutorShutdownTest):
         queue_management_thread = executor._queue_management_thread
         del executor

-        # Make sure that all the executor ressources were properly cleaned by
+        # Make sure that all the executor resources were properly cleaned by
         # the shutdown process
         queue_management_thread.join()
         for p in processes.values():
@@ -886,24 +886,24 @@ class ExecutorDeadlockTest:
         # extensive testing for deadlock caused by crashes in a pool.
         self.executor.shutdown(wait=True)
         crash_cases = [
-            # Check problem occuring while pickling a task in
+            # Check problem occurring while pickling a task in
             # the task_handler thread
             (id, (ErrorAtPickle(),), PicklingError, "error at task pickle"),
-            # Check problem occuring while unpickling a task on workers
+            # Check problem occurring while unpickling a task on workers
             (id, (ExitAtUnpickle(),), BrokenProcessPool,
              "exit at task unpickle"),
             (id, (ErrorAtUnpickle(),), BrokenProcessPool,
              "error at task unpickle"),
             (id, (CrashAtUnpickle(),), BrokenProcessPool,
              "crash at task unpickle"),
-            # Check problem occuring during func execution on workers
+            # Check problem occurring during func execution on workers
             (_crash, (), BrokenProcessPool,
              "crash during func execution on worker"),
             (_exit, (), SystemExit,
              "exit during func execution on worker"),
             (_raise_error, (RuntimeError, ), RuntimeError,
              "error during func execution on worker"),
-            # Check problem occuring while pickling a task result
+            # Check problem occurring while pickling a task result
             # on workers
             (_return_instance, (CrashAtPickle,), BrokenProcessPool,
              "crash during result pickle on worker"),
@@ -911,7 +911,7 @@ class ExecutorDeadlockTest:
              "exit during result pickle on worker"),
             (_return_instance, (ErrorAtPickle,), PicklingError,
              "error during result pickle on worker"),
-            # Check problem occuring while unpickling a task in
+            # Check problem occurring while unpickling a task in
             # the result_handler thread
             (_return_instance, (ErrorAtUnpickle,), BrokenProcessPool,
              "error during result unpickle in result_handler"),
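These crash cases exercise the pool's handling of workers that die mid-task. Below is a hedged, standalone sketch of one such case: an object whose unpickling kills the worker, which marks the pool broken. ExitAtUnpickle here is a stand-in defined locally, not the helper from the test suite:

    import os
    from concurrent.futures import ProcessPoolExecutor
    from concurrent.futures.process import BrokenProcessPool

    class ExitAtUnpickle:
        """Unpickling this object makes the worker process exit abruptly."""
        def __reduce__(self):
            return os._exit, (1,)

    if __name__ == "__main__":
        with ProcessPoolExecutor(max_workers=1) as pool:
            future = pool.submit(id, ExitAtUnpickle())
            try:
                future.result()
            except BrokenProcessPool as exc:
                print("pool marked broken:", exc)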
@@ -1500,7 +1500,7 @@ class Knights:
                 succs[final].remove(corner)
                 add_to_successors(this)

-        # Generate moves 3 thru m*n-1.
+        # Generate moves 3 through m*n-1.
         def advance(len=len):
             # If some successor has only one exit, must take it.
             # Else favor successors with fewer exits.
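The "favor successors with fewer exits" rule mentioned in these comments is Warnsdorff's heuristic. A small illustrative sketch, independent of the test's Knights class:

    def knight_moves(square, n):
        """Board positions a knight can reach from square on an n x n board."""
        x, y = square
        deltas = [(1, 2), (2, 1), (2, -1), (1, -2),
                  (-1, -2), (-2, -1), (-2, 1), (-1, 2)]
        return [(x + dx, y + dy) for dx, dy in deltas
                if 0 <= x + dx < n and 0 <= y + dy < n]

    def ordered_successors(square, free, n):
        """Free neighbours of square, those with the fewest onward exits first."""
        succs = [s for s in knight_moves(square, n) if s in free]
        return sorted(succs, key=lambda s: sum(t in free for t in knight_moves(s, n)))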
@@ -1522,7 +1522,7 @@ class Knights:
                 yield i
                 add_to_successors(i)

-        # Generate moves 3 thru m*n-1. Alternative version using a
+        # Generate moves 3 through m*n-1. Alternative version using a
         # stronger (but more expensive) heuristic to order successors.
         # Since the # of backtracking levels is m*n, a poor move early on
         # can take eons to undo. Smallest square board for which this
@@ -1637,7 +1637,7 @@ class MappingTestCase(TestBase):
         # has to keep looping to find the first object we delete.
         objs.reverse()

-        # Turn on mutation in C.__eq__. The first time thru the loop,
+        # Turn on mutation in C.__eq__. The first time through the loop,
         # under the iterkeys() business the first comparison will delete
         # the last item iterkeys() would see, and that causes a
         # RuntimeError: dictionary changed size during iteration
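The RuntimeError quoted in that comment is easy to reproduce outside the weakref machinery; a minimal sketch:

    d = {i: None for i in range(5)}
    try:
        for key in d:
            del d[key]              # shrinks the dict while iterating over it
    except RuntimeError as exc:
        print(exc)                  # "dictionary changed size during iteration"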
@@ -199,8 +199,8 @@ class TestDiscovery(unittest.TestCase):
                          ['a_directory', 'test_directory', 'test_directory2'])

         # load_tests should have been called once with loader, tests and pattern
-        # (but there are no tests in our stub module itself, so thats [] at the
-        # time of call.
+        # (but there are no tests in our stub module itself, so that is [] at
+        # the time of call).
         self.assertEqual(Module.load_tests_args,
                          [(loader, [], 'test*')])

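The (loader, tests, pattern) call mentioned in the comment is the load_tests protocol; a minimal sketch of such a hook, with invented test names:

    import unittest

    class ExampleTest(unittest.TestCase):
        def test_nothing(self):
            self.assertTrue(True)

    def load_tests(loader, standard_tests, pattern):
        # standard_tests may be empty (the "[]" in the comment) when the
        # stub module defines no directly collected tests.
        standard_tests.addTests(loader.loadTestsFromTestCase(ExampleTest))
        return standard_tests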
@@ -187,7 +187,7 @@ is_coroutine(PyObject *coro)
         return _is_coroutine(coro);
     }

-    /* either an error has occured or
+    /* either an error has occurred or
        type(coro) is in iscoroutine_typecache
     */
     return has_it;
@@ -3420,7 +3420,7 @@ Inconsistent:
     PyErr_SetString(PyExc_ValueError, "fromutc: tz.dst() gave"
                     "inconsistent results; cannot convert");

-    /* fall thru to failure */
+    /* fall through to failure */
 Fail:
     Py_XDECREF(off);
     Py_XDECREF(dst);
@@ -962,7 +962,7 @@ generate_hash_name_list(void)
  * This macro generates constructor function definitions for specific
  * hash algorithms. These constructors are much faster than calling
  * the generic one passing it a python string and are noticeably
- * faster than calling a python new() wrapper. Thats important for
+ * faster than calling a python new() wrapper. That is important for
  * code that wants to make hashes of a bunch of small strings.
  * The first call will lazy-initialize, which reports an exception
  * if initialization fails.
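The speed difference the comment refers to can be observed from Python with timeit; exact numbers depend on the build and OpenSSL version, so this is a rough sketch rather than a benchmark:

    import timeit

    direct = timeit.timeit("hashlib.sha256(b'x')", setup="import hashlib")
    generic = timeit.timeit("hashlib.new('sha256', b'x')", setup="import hashlib")
    print(f"sha256(): {direct:.2f}s   new('sha256'): {generic:.2f}s")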
@@ -1015,7 +1015,7 @@ _Pickler_OpcodeBoundary(PicklerObject *self)
         if(_Pickler_CommitFrame(self)) {
             return -1;
         }
-        /* Flush the content of the commited frame to the underlying
+        /* Flush the content of the committed frame to the underlying
          * file and reuse the pickler buffer for the next frame so as
          * to limit memory usage when dumping large complex objects to
          * a file.
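The frames this comment describes are visible in the pickle stream itself; with protocol 4 the disassembly begins with a PROTO opcode followed by a FRAME opcode. A quick illustration, assuming the default framing thresholds:

    import pickle
    import pickletools

    data = pickle.dumps(list(range(10)), protocol=4)
    pickletools.dis(data)    # shows PROTO 4, then FRAME, then the list opcodes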
@@ -2469,7 +2469,7 @@ pool_is_in_list(const poolp target, poolp list)
  * checks.
  *
  * Return 0 if the memory debug hooks are not installed or no statistics was
- * writen into out, return 1 otherwise.
+ * written into out, return 1 otherwise.
  */
 int
 _PyObject_DebugMallocStats(FILE *out)
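From Python, this C helper's output is reachable through sys._debugmallocstats(), which prints the small-object allocator statistics to stderr; the debug-hook portion mentioned in the comment only appears in builds where the hooks are installed:

    import sys

    sys._debugmallocstats()   # prints pymalloc arena/pool/block statistics to stderr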
@@ -5112,7 +5112,7 @@ PyType_Ready(PyTypeObject *type)
     /* PyType_Ready is the closest thing we have to a choke point
      * for type objects, so is the best place I can think of to try
      * to get type objects into the doubly-linked list of all objects.
-     * Still, not all type objects go thru PyType_Ready.
+     * Still, not all type objects go through PyType_Ready.
      */
     _Py_AddToAllObjects((PyObject *)type, 0);
 #endif
@@ -1784,7 +1784,7 @@ vgetargskeywords(PyObject *args, PyObject *kwargs, const char *format,
             }
         }

-        /* We are into optional args, skip thru to any remaining
+        /* We are into optional args, skip through to any remaining
          * keyword args */
         msg = skipitem(&format, p_va, flags);
         if (msg) {
@@ -2176,7 +2176,7 @@ vgetargskeywordsfast_impl(PyObject *const *args, Py_ssize_t nargs,
             return cleanreturn(1, &freelist);
         }

-        /* We are into optional args, skip thru to any remaining
+        /* We are into optional args, skip through to any remaining
          * keyword args */
         msg = skipitem(&format, p_va, flags);
         assert(msg == NULL);