#11565: Merge with 3.2.

Ezio Melotti 2011-03-16 11:38:59 +02:00
commit b88ed1549e
93 changed files with 124 additions and 124 deletions

View File

@@ -1,6 +1,6 @@
 import sqlite3
 # The shared cache is only available in SQLite versions 3.3.3 or later
-# See the SQLite documentaton for details.
+# See the SQLite documentation for details.
 sqlite3.enable_shared_cache(True)

View File

@@ -468,7 +468,7 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/
 arbitrary data.
 0 is returned on success. buffer and buffer_len are only
-set in case no error occurrs. Otherwise, -1 is returned and
+set in case no error occurs. Otherwise, -1 is returned and
 an exception set.
 */
@@ -482,7 +482,7 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/
 writable memory location in buffer of size buffer_len.
 0 is returned on success. buffer and buffer_len are only
-set in case no error occurrs. Otherwise, -1 is returned and
+set in case no error occurs. Otherwise, -1 is returned and
 an exception set.
 */

View File

@@ -61,7 +61,7 @@
 # endif
 # if defined(__LP64__)
-/* MacOSX 10.4 (the first release to suppport 64-bit code
+/* MacOSX 10.4 (the first release to support 64-bit code
 * at all) only supports 64-bit in the UNIX layer.
 * Therefore surpress the toolbox-glue in 64-bit mode.
 */

View File

@@ -284,7 +284,7 @@ class Sniffer:
 an all or nothing approach, so we allow for small variations in this
 number.
 1) build a table of the frequency of each character on every line.
-2) build a table of freqencies of this frequency (meta-frequency?),
+2) build a table of frequencies of this frequency (meta-frequency?),
 e.g. 'x occurred 5 times in 10 rows, 6 times in 1000 rows,
 7 times in 2 rows'
 3) use the mode of the meta-frequency to determine the /expected/

View File

@@ -37,7 +37,7 @@ class ArrayTestCase(unittest.TestCase):
 values = [ia[i] for i in range(len(init))]
 self.assertEqual(values, [0] * len(init))
-# Too many in itializers should be caught
+# Too many initializers should be caught
 self.assertRaises(IndexError, int_array, *range(alen*2))
 CharArray = ARRAY(c_char, 3)

View File

@@ -27,7 +27,7 @@ class InitTest(unittest.TestCase):
 self.assertEqual((y.x.a, y.x.b), (0, 0))
 self.assertEqual(y.x.new_was_called, False)
-# But explicitely creating an X structure calls __new__ and __init__, of course.
+# But explicitly creating an X structure calls __new__ and __init__, of course.
 x = X()
 self.assertEqual((x.a, x.b), (9, 12))
 self.assertEqual(x.new_was_called, True)

View File

@@ -157,7 +157,7 @@ class NumberTestCase(unittest.TestCase):
 def test_int_from_address(self):
 from array import array
 for t in signed_types + unsigned_types:
-# the array module doesn't suppport all format codes
+# the array module doesn't support all format codes
 # (no 'q' or 'Q')
 try:
 array(t._type_)

View File

@@ -17,7 +17,7 @@ if sys.platform == "win32" and sizeof(c_void_p) == sizeof(c_int):
 # ValueError: Procedure probably called with not enough arguments (4 bytes missing)
 self.assertRaises(ValueError, IsWindow)
-# This one should succeeed...
+# This one should succeed...
 self.assertEqual(0, IsWindow(0))
 # ValueError: Procedure probably called with too many arguments (8 bytes in excess)

View File

@@ -1719,7 +1719,7 @@ class HtmlDiff(object):
 line = line.replace(' ','\0')
 # expand tabs into spaces
 line = line.expandtabs(self._tabsize)
-# relace spaces from expanded tabs back into tab characters
+# replace spaces from expanded tabs back into tab characters
 # (we'll replace them with markup after we do differencing)
 line = line.replace(' ','\t')
 return line.replace('\0',' ').rstrip('\n')

View File

@@ -359,7 +359,7 @@ class Command:
 not self.force, dry_run=self.dry_run)
 def move_file (self, src, dst, level=1):
-"""Move a file respectin dry-run flag."""
+"""Move a file respecting dry-run flag."""
 return file_util.move_file(src, dst, dry_run=self.dry_run)
 def spawn(self, cmd, search_path=1, level=1):

View File

@@ -155,7 +155,7 @@ class CygwinCCompiler(UnixCCompiler):
 self.dll_libraries = get_msvcr()
 def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
-"""Compiles the source by spawing GCC and windres if needed."""
+"""Compiles the source by spawning GCC and windres if needed."""
 if ext == '.rc' or ext == '.res':
 # gcc needs '.res' and '.rc' compiled to object files !!!
 try:

View File

@@ -39,7 +39,7 @@ class cleanTestCase(support.TempdirManager,
 self.assertTrue(not os.path.exists(path),
 '%s was not removed' % path)
-# let's run the command again (should spit warnings but suceed)
+# let's run the command again (should spit warnings but succeed)
 cmd.all = 1
 cmd.ensure_finalized()
 cmd.run()

View File

@@ -62,7 +62,7 @@ class InstallTestCase(support.TempdirManager,
 if sys.version < '2.6':
 return
-# preparing the environement for the test
+# preparing the environment for the test
 self.old_user_base = site.USER_BASE
 self.old_user_site = site.USER_SITE
 self.tmpdir = self.mkdtemp()

View File

@@ -311,7 +311,7 @@ class SDistTestCase(PyPIRCCommandTestCase):
 # adding a file
 self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#')
-# make sure build_py is reinitinialized, like a fresh run
+# make sure build_py is reinitialized, like a fresh run
 build_py = dist.get_command_obj('build_py')
 build_py.finalized = False
 build_py.ensure_finalized()

View File

@@ -1211,7 +1211,7 @@ class DocTestRunner:
 # Process each example.
 for examplenum, example in enumerate(test.examples):
-# If REPORT_ONLY_FIRST_FAILURE is set, then supress
+# If REPORT_ONLY_FIRST_FAILURE is set, then suppress
 # reporting after the first failure.
 quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and
 failures > 0)
@@ -2135,7 +2135,7 @@ class DocTestCase(unittest.TestCase):
 caller can catch the errors and initiate post-mortem debugging.
 The DocTestCase provides a debug method that raises
-UnexpectedException errors if there is an unexepcted
+UnexpectedException errors if there is an unexpected
 exception:
 >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',

View File

@@ -47,7 +47,7 @@ ecre = re.compile(r'''
 # For use with .match()
 fcre = re.compile(r'[\041-\176]+:$')
-# Find a header embeded in a putative header value. Used to check for
+# Find a header embedded in a putative header value. Used to check for
 # header injection attack.
 _embeded_header = re.compile(r'\n[^ \t]+:')

View File

@@ -54,7 +54,7 @@ class FormatParagraph:
 # If the block ends in a \n, we dont want the comment
 # prefix inserted after it. (Im not sure it makes sense to
 # reformat a comment block that isnt made of complete
-# lines, but whatever!) Can't think of a clean soltution,
+# lines, but whatever!) Can't think of a clean solution,
 # so we hack away
 block_suffix = ""
 if not newdata[-1]:

View File

@@ -18,7 +18,7 @@ window.
 An IDLE extension class is instantiated with a single argument,
 `editwin', an EditorWindow instance. The extension cannot assume much
-about this argument, but it is guarateed to have the following instance
+about this argument, but it is guaranteed to have the following instance
 variables:
 text a Text instance (a widget)

View File

@@ -53,8 +53,8 @@ def tkVersionWarning(root):
 def addOpenEventSupport(root, flist):
 """
-This ensures that the application will respont to open AppleEvents, which
-makes is feaseable to use IDLE as the default application for python files.
+This ensures that the application will respond to open AppleEvents, which
+makes is feasible to use IDLE as the default application for python files.
 """
 def doOpenFile(*args):
 for fn in args:

View File

@@ -48,7 +48,7 @@ def fixup_parse_tree(cls_node):
 """
 for node in cls_node.children:
 if node.type == syms.suite:
-# already in the prefered format, do nothing
+# already in the preferred format, do nothing
 return
 # !%@#! oneliners have no suite node, we have to fake one up

View File

@@ -51,7 +51,7 @@ class Converter(grammar.Grammar):
 self.finish_off()
 def parse_graminit_h(self, filename):
-"""Parse the .h file writen by pgen. (Internal)
+"""Parse the .h file written by pgen. (Internal)
 This file is a sequence of #define statements defining the
 nonterminals of the grammar as numbers. We build two tables
@@ -82,7 +82,7 @@ class Converter(grammar.Grammar):
 return True
 def parse_graminit_c(self, filename):
-"""Parse the .c file writen by pgen. (Internal)
+"""Parse the .c file written by pgen. (Internal)
 The file looks as follows. The first two lines are always this:

View File

@@ -658,8 +658,8 @@ class WildcardPattern(BasePattern):
 content: optional sequence of subsequences of patterns;
 if absent, matches one node;
 if present, each subsequence is an alternative [*]
-min: optinal minumum number of times to match, default 0
-max: optional maximum number of times tro match, default HUGE
+min: optional minimum number of times to match, default 0
+max: optional maximum number of times to match, default HUGE
 name: optional name assigned to this match
 [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is

View File

@@ -316,7 +316,7 @@ class GrammarTests(unittest.TestCase):
 ### simple_stmt: small_stmt (';' small_stmt)* [';']
 x = 1; pass; del x
 def foo():
-# verify statments that end with semi-colons
+# verify statements that end with semi-colons
 x = 1; pass; del x;
 foo()

View File

@@ -356,7 +356,7 @@ class GrammarTests(unittest.TestCase):
 ### simple_stmt: small_stmt (';' small_stmt)* [';']
 x = 1; pass; del x
 def foo():
-# verify statments that end with semi-colons
+# verify statements that end with semi-colons
 x = 1; pass; del x;
 foo()

View File

@@ -405,7 +405,7 @@ def expanduser(path):
 # - $varname is accepted.
 # - %varname% is accepted.
 # - varnames can be made out of letters, digits and the characters '_-'
-# (though is not verifed in the ${varname} and %varname% cases)
+# (though is not verified in the ${varname} and %varname% cases)
 # XXX With COMMAND.COM you can use any characters in a variable name,
 # XXX except '^|<>='.

View File

@@ -1406,7 +1406,7 @@ opcodes = [
 proto=0,
 doc="""Read an object from the memo and push it on the stack.
-The index of the memo object to push is given by the newline-teriminated
+The index of the memo object to push is given by the newline-terminated
 decimal string following. BINGET and LONG_BINGET are space-optimized
 versions.
 """),

View File

@@ -417,7 +417,7 @@ def _syscmd_ver(system='', release='', version='',
 info = pipe.read()
 if pipe.close():
 raise os.error('command failed')
-# XXX How can I supress shell errors from being written
+# XXX How can I suppress shell errors from being written
 # to stderr ?
 except os.error as why:
 #print 'Command %s failed: %s' % (cmd,why)

View File

@@ -1010,7 +1010,7 @@ class Popen(object):
 except pywintypes.error as e:
 # Translate pywintypes.error to WindowsError, which is
 # a subclass of OSError. FIXME: We should really
-# translate errno using _sys_errlist (or simliar), but
+# translate errno using _sys_errlist (or similar), but
 # how can this be done from Python?
 raise WindowsError(*e.args)
 finally:

View File

@@ -3414,7 +3414,7 @@ class TestTimezoneConversions(unittest.TestCase):
 self.assertEqual(dt, there_and_back)
 # Because we have a redundant spelling when DST begins, there is
-# (unforunately) an hour when DST ends that can't be spelled at all in
+# (unfortunately) an hour when DST ends that can't be spelled at all in
 # local time. When DST ends, the clock jumps from 1:59 back to 1:00
 # again. The hour 1:MM DST has no spelling then: 1:MM is taken to be
 # standard time. 1:MM DST == 0:MM EST, but 0:MM is taken to be

View File

@@ -19,7 +19,7 @@ class C (B):
 # XXX: This causes test_pyclbr.py to fail, but only because the
 # introspection-based is_method() code in the test can't
-# distinguish between this and a geniune method function like m().
+# distinguish between this and a genuine method function like m().
 # The pyclbr.py module gets this right as it parses the text.
 #
 #f = f

View File

@@ -127,7 +127,7 @@ class TestPendingCalls(unittest.TestCase):
 context.event.set()
 def test_pendingcalls_non_threaded(self):
-#again, just using the main thread, likely they will all be dispathced at
+#again, just using the main thread, likely they will all be dispatched at
 #once. It is ok to ask for too many, because we loop until we find a slot.
 #the loop can be interrupted to dispatch.
 #there are only 32 dispatch slots, so we go for twice that!

View File

@@ -228,7 +228,7 @@ class DecimalTest(unittest.TestCase):
 try:
 t = self.eval_line(line)
 except DecimalException as exception:
-#Exception raised where there shoudn't have been one.
+#Exception raised where there shouldn't have been one.
 self.fail('Exception "'+exception.__class__.__name__ + '" raised on line '+line)
 return

View File

@@ -3967,7 +3967,7 @@ order (MRO) for bases """
 except TypeError:
 pass
 else:
-self.fail("Carlo Verre __setattr__ suceeded!")
+self.fail("Carlo Verre __setattr__ succeeded!")
 try:
 object.__delattr__(str, "lower")
 except TypeError:

View File

@@ -1297,7 +1297,7 @@ marking, as well as interline differences.
 ? + ++ ^
 TestResults(failed=1, attempted=1)
-The REPORT_ONLY_FIRST_FAILURE supresses result output after the first
+The REPORT_ONLY_FIRST_FAILURE suppresses result output after the first
 failing example:
 >>> def f(x):
@@ -1327,7 +1327,7 @@ failing example:
 2
 TestResults(failed=3, attempted=5)
-However, output from `report_start` is not supressed:
+However, output from `report_start` is not suppressed:
 >>> doctest.DocTestRunner(verbose=True, optionflags=flags).run(test)
 ... # doctest: +ELLIPSIS
@@ -2278,7 +2278,7 @@ We don't want `-v` in sys.argv for these tests.
 >>> doctest.master = None # Reset master.
 (Note: we'll be clearing doctest.master after each call to
-`doctest.testfile`, to supress warnings about multiple tests with the
+`doctest.testfile`, to suppress warnings about multiple tests with the
 same name.)
 Globals may be specified with the `globs` and `extraglobs` parameters:
@@ -2314,7 +2314,7 @@ optional `module_relative` parameter:
 TestResults(failed=0, attempted=2)
 >>> doctest.master = None # Reset master.
-Verbosity can be increased with the optional `verbose` paremter:
+Verbosity can be increased with the optional `verbose` parameter:
 >>> doctest.testfile('test_doctest.txt', globs=globs, verbose=True)
 Trying:
@@ -2351,7 +2351,7 @@ parameter:
 TestResults(failed=1, attempted=2)
 >>> doctest.master = None # Reset master.
-The summary report may be supressed with the optional `report`
+The summary report may be suppressed with the optional `report`
 parameter:
 >>> doctest.testfile('test_doctest.txt', report=False)

View File

@@ -228,7 +228,7 @@ Another helper function
 >>> Foo.method(1, *[2, 3])
 5
-A PyCFunction that takes only positional parameters shoud allow an
+A PyCFunction that takes only positional parameters should allow an
 empty keyword dictionary to pass without a complaint, but raise a
 TypeError if te dictionary is not empty

View File

@@ -67,7 +67,7 @@ class GeneralFloatCases(unittest.TestCase):
 def test_float_with_comma(self):
 # set locale to something that doesn't use '.' for the decimal point
 # float must not accept the locale specific decimal point but
-# it still has to accept the normal python syntac
+# it still has to accept the normal python syntax
 import locale
 if not locale.localeconv()['decimal_point'] == ',':
 return
@@ -189,7 +189,7 @@ class GeneralFloatCases(unittest.TestCase):
 def assertEqualAndEqualSign(self, a, b):
 # fail unless a == b and a and b have the same sign bit;
 # the only difference from assertEqual is that this test
-# distingishes -0.0 and 0.0.
+# distinguishes -0.0 and 0.0.
 self.assertEqual((a, copysign(1.0, a)), (b, copysign(1.0, b)))
 @support.requires_IEEE_754

View File

@@ -350,7 +350,7 @@ class GrammarTests(unittest.TestCase):
 ### simple_stmt: small_stmt (';' small_stmt)* [';']
 x = 1; pass; del x
 def foo():
-# verify statments that end with semi-colons
+# verify statements that end with semi-colons
 x = 1; pass; del x;
 foo()

View File

@@ -462,7 +462,7 @@ class RejectingSocketlessRequestHandler(SocketlessRequestHandler):
 return False
 class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
-"""Test the functionaility of the BaseHTTPServer.
+"""Test the functionality of the BaseHTTPServer.
 Test the support for the Expect 100-continue header.
 """

View File

@@ -20,11 +20,11 @@ This is the case for tuples, range objects, and itertools.repeat().
 Some containers become temporarily immutable during iteration. This includes
 dicts, sets, and collections.deque. Their implementation is equally simple
-though they need to permantently set their length to zero whenever there is
+though they need to permanently set their length to zero whenever there is
 an attempt to iterate after a length mutation.
 The situation slightly more involved whenever an object allows length mutation
-during iteration. Lists and sequence iterators are dynanamically updatable.
+during iteration. Lists and sequence iterators are dynamically updatable.
 So, if a list is extended during iteration, the iterator will continue through
 the new items. If it shrinks to a point before the most recent iteration,
 then no further items are available and the length is reported at zero.

View File

@@ -1526,7 +1526,7 @@ Samuele
 ... return chain(iterable, repeat(None))
 >>> def ncycles(iterable, n):
-... "Returns the seqeuence elements n times"
+... "Returns the sequence elements n times"
 ... return chain(*repeat(iterable, n))
 >>> def dotproduct(vec1, vec2):

View File

@@ -194,7 +194,7 @@ class BugsTestCase(unittest.TestCase):
 # >>> type(loads(dumps(Int())))
 # <type 'int'>
 for typ in (int, float, complex, tuple, list, dict, set, frozenset):
-# Note: str sublclasses are not tested because they get handled
+# Note: str subclasses are not tested because they get handled
 # by marshal's routines for objects supporting the buffer API.
 subtyp = type('subtyp', (typ,), {})
 self.assertRaises(ValueError, marshal.dumps, subtyp())

View File

@@ -820,7 +820,7 @@ class MathTests(unittest.TestCase):
 # the following tests have been commented out since they don't
 # really belong here: the implementation of ** for floats is
-# independent of the implemention of math.pow
+# independent of the implementation of math.pow
 #self.assertEqual(1**NAN, 1)
 #self.assertEqual(1**INF, 1)
 #self.assertEqual(1**NINF, 1)

View File

@@ -594,7 +594,7 @@ class MmapTests(unittest.TestCase):
 m2.close()
 m1.close()
-# Test differnt tag
+# Test different tag
 m1 = mmap.mmap(-1, len(data1), tagname="foo")
 m1[:] = data1
 m2 = mmap.mmap(-1, len(data2), tagname="boo")

View File

@@ -795,7 +795,7 @@ class _TestEvent(BaseTestCase):
 event = self.Event()
 wait = TimingWrapper(event.wait)
-# Removed temporaily, due to API shear, this does not
+# Removed temporarily, due to API shear, this does not
 # work with threading._Event objects. is_set == isSet
 self.assertEqual(event.is_set(), False)
@@ -1765,7 +1765,7 @@ class _TestFinalize(BaseTestCase):
 util.Finalize(None, conn.send, args=('STOP',), exitpriority=-100)
-# call mutliprocessing's cleanup function then exit process without
+# call multiprocessing's cleanup function then exit process without
 # garbage collecting locals
 util._exit_function()
 conn.close()

View File

@@ -56,7 +56,7 @@ class TestPkg(unittest.TestCase):
 if self.root: # Only clean if the test was actually run
 cleanout(self.root)
-# delete all modules concerning the tested hiearchy
+# delete all modules concerning the tested hierarchy
 if self.pkgname:
 modules = [name for name in sys.modules
 if self.pkgname in name.split('.')]

View File

@@ -20,7 +20,7 @@ NotDefined = object()
 # A dispatch table all 8 combinations of providing
 # sep, end, and file
 # I use this machinery so that I'm not just passing default
-# values to print, I'm eiher passing or not passing in the
+# values to print, I'm either passing or not passing in the
 # arguments
 dispatch = {
 (False, False, False):

View File

@@ -22,7 +22,7 @@ We have to test this with various file encodings. We also test it with
 exec()/eval(), which uses a different code path.
 This file is really about correct treatment of encodings and
-backslashes. It doens't concern itself with issues like single
+backslashes. It doesn't concern itself with issues like single
 vs. double quotes or singly- vs. triply-quoted strings: that's dealt
 with elsewhere (I assume).
 """

View File

@@ -536,7 +536,7 @@ class CacheTests(unittest.TestCase):
 self.assertIsNot(first_time_re, second_time_re)
 # Possible test locale is not supported while initial locale is.
 # If this is the case just suppress the exception and fall-through
-# to the reseting to the original locale.
+# to the resetting to the original locale.
 except locale.Error:
 pass
 # Make sure we don't trample on the locale setting once we leave the

View File

@@ -463,7 +463,7 @@ class StructTest(unittest.TestCase):
 test_string)
 def test_unpack_with_buffer(self):
-# SF bug 1563759: struct.unpack doens't support buffer protocol objects
+# SF bug 1563759: struct.unpack doesn't support buffer protocol objects
 data1 = array.array('B', b'\x12\x34\x56\x78')
 data2 = memoryview(b'\x12\x34\x56\x78') # XXX b'......XXXX......', 6, 4
 for data in [data1, data2]:

View File

@@ -237,7 +237,7 @@ SyntaxError: can't assign to function call
 Test continue in finally in weird combinations.
-continue in for loop under finally shouuld be ok.
+continue in for loop under finally should be ok.
 >>> def test():
 ... try:

View File

@@ -492,7 +492,7 @@ class SysModuleTest(unittest.TestCase):
 # provide too much opportunity for insane things to happen.
 # We don't want them in the interned dict and if they aren't
 # actually interned, we don't want to create the appearance
-# that they are by allowing intern() to succeeed.
+# that they are by allowing intern() to succeed.
 class S(str):
 def __hash__(self):
 return 123

View File

@@ -578,7 +578,7 @@ class ThreadJoinOnShutdown(BaseTestCase):
 # This acquires the lock and then waits until the child has forked
 # before returning, which will release the lock soon after. If
 # someone else tries to fix this test case by acquiring this lock
-# before forking instead of reseting it, the test case will
+# before forking instead of resetting it, the test case will
 # deadlock when it shouldn't.
 condition = w._block
 orig_acquire = condition.acquire

View File

@@ -209,7 +209,7 @@ class TestRunExecCounts(unittest.TestCase):
 (self.my_py_filename, firstlineno + 4): 1,
 }
-# When used through 'run', some other spurios counts are produced, like
+# When used through 'run', some other spurious counts are produced, like
 # the settrace of threading, which we ignore, just making sure that the
 # counts fo traced_func_loop were right.
 #

View File

@@ -1021,7 +1021,7 @@ class URLopener_Tests(unittest.TestCase):
 # Just commented them out.
 # Can't really tell why keep failing in windows and sparc.
-# Everywhere else they work ok, but on those machines, someteimes
+# Everywhere else they work ok, but on those machines, sometimes
 # fail in one of the tests, sometimes in other. I have a linux, and
 # the tests go ok.
 # If anybody has one of the problematic enviroments, please help!

View File

@@ -332,7 +332,7 @@ class WarnTests(unittest.TestCase):
 sys.argv = argv
 def test_warn_explicit_type_errors(self):
-# warn_explicit() shoud error out gracefully if it is given objects
+# warn_explicit() should error out gracefully if it is given objects
 # of the wrong types.
 # lineno is expected to be an integer.
 self.assertRaises(TypeError, self.module.warn_explicit,

View File

@@ -135,7 +135,7 @@ class InternalFunctionsTest(unittest.TestCase):
 # minimum acceptable for image type
 self.assertEqual(ttk._format_elemcreate('image', False, 'test'),
 ("test ", ()))
-# specifiyng a state spec
+# specifying a state spec
 self.assertEqual(ttk._format_elemcreate('image', False, 'test',
 ('', 'a')), ("test {} a", ()))
 # state spec with multiple states

View File

@@ -171,7 +171,7 @@ class tixCommand:
 return self.tk.call('tix', 'getimage', name)
 def tix_option_get(self, name):
-"""Gets the options manitained by the Tix
+"""Gets the options maintained by the Tix
 scheme mechanism. Available options include:
 active_bg active_fg bg
@@ -576,7 +576,7 @@ class ButtonBox(TixWidget):
 class ComboBox(TixWidget):
 """ComboBox - an Entry field with a dropdown menu. The user can select a
-choice by either typing in the entry subwdget or selecting from the
+choice by either typing in the entry subwidget or selecting from the
 listbox subwidget.
 Subwidget Class
@@ -869,7 +869,7 @@ class HList(TixWidget, XView, YView):
 """HList - Hierarchy display widget can be used to display any data
 that have a hierarchical structure, for example, file system directory
 trees. The list entries are indented and connected by branch lines
-according to their places in the hierachy.
+according to their places in the hierarchy.
 Subwidgets - None"""
@@ -1519,7 +1519,7 @@ class TList(TixWidget, XView, YView):
 self.tk.call(self._w, 'selection', 'set', first, last)
 class Tree(TixWidget):
-"""Tree - The tixTree widget can be used to display hierachical
+"""Tree - The tixTree widget can be used to display hierarchical
 data in a tree form. The user can adjust
 the view of the tree by opening or closing parts of the tree."""

View File

@@ -707,7 +707,7 @@ class Combobox(Entry):
 textvariable, values, width
 """
 # The "values" option may need special formatting, so leave to
-# _format_optdict the responsability to format it
+# _format_optdict the responsibility to format it
 if "values" in kw:
 kw["values"] = _format_optdict({'v': kw["values"]})[1]

View File

@@ -1488,7 +1488,7 @@ class TurtleScreen(TurtleScreenBase):
 Optional arguments:
 canvwidth -- positive integer, new width of canvas in pixels
 canvheight -- positive integer, new height of canvas in pixels
-bg -- colorstring or color-tupel, new backgroundcolor
+bg -- colorstring or color-tuple, new backgroundcolor
 If no arguments are given, return current (canvaswidth, canvasheight)
 Do not alter the drawing window. To observe hidden parts of
@@ -3242,9 +3242,9 @@ class RawTurtle(TPen, TNavigator):
 fill="", width=ps)
 # Turtle now at position old,
 self._position = old
-## if undo is done during crating a polygon, the last vertex
-## will be deleted. if the polygon is entirel deleted,
-## creatigPoly will be set to False.
+## if undo is done during creating a polygon, the last vertex
+## will be deleted. if the polygon is entirely deleted,
+## creatingPoly will be set to False.
 ## Polygons created before the last one will not be affected by undo()
 if self._creatingPoly:
 if len(self._poly) > 0:
@@ -3796,7 +3796,7 @@ class _Screen(TurtleScreen):
 class Turtle(RawTurtle):
-"""RawTurtle auto-crating (scrolled) canvas.
+"""RawTurtle auto-creating (scrolled) canvas.
 When a Turtle object is created or a function derived from some
 Turtle method is called a TurtleScreen object is automatically created.
@@ -3836,7 +3836,7 @@ def write_docstringdict(filename="turtle_docstringdict"):
 filename -- a string, used as filename
 default value is turtle_docstringdict
-Has to be called explicitely, (not used by the turtle-graphics classes)
+Has to be called explicitly, (not used by the turtle-graphics classes)
 The docstring dictionary will be written to the Python script <filname>.py
 It is intended to serve as a template for translation of the docstrings
 into different languages.

View File

@@ -4,7 +4,7 @@
 tdemo_bytedesign.py
 An example adapted from the example-suite
-of PythonCard's turtle graphcis.
+of PythonCard's turtle graphics.
 It's based on an article in BYTE magazine
 Problem Solving with Logo: Using Turtle

View File

@@ -6,7 +6,7 @@
 #
 # NodeList -- lightest possible NodeList implementation
 #
-# EmptyNodeList -- lightest possible NodeList that is guarateed to
+# EmptyNodeList -- lightest possible NodeList that is guaranteed to
 # remain empty (immutable)
 #
 # StringTypes -- tuple of defined string types

View File

@@ -1905,7 +1905,7 @@ def _clone_node(node, deep, newOwnerDocument):
 e._call_user_data_handler(operation, n, entity)
 else:
 # Note the cloning of Document and DocumentType nodes is
-# implemenetation specific. minidom handles those cases
+# implementation specific. minidom handles those cases
 # directly in the cloneNode() methods.
 raise xml.dom.NotSupportedErr("Cannot clone node %s" % repr(node))

View File

@@ -240,7 +240,7 @@ class SimpleXMLRPCDispatcher:
 marshalled data. For backwards compatibility, a dispatch
 function can be provided as an argument (see comment in
 SimpleXMLRPCRequestHandler.do_POST) but overriding the
-existing method through subclassing is the prefered means
+existing method through subclassing is the preferred means
 of changing method dispatch behavior.
 """

View File

@@ -362,7 +362,7 @@ def fileContents(fn):
 def runCommand(commandline):
 """
-Run a command and raise RuntimeError if it fails. Output is surpressed
+Run a command and raise RuntimeError if it fails. Output is suppressed
 unless the command fails.
 """
 fd = os.popen(commandline, 'r')

View File

@@ -3317,7 +3317,7 @@ PyCFuncPtr_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 /* XXX XXX This would allow to pass additional options. For COM
 method *implementations*, we would probably want different
 behaviour than in 'normal' callback functions: return a HRESULT if
-an exception occurrs in the callback, and print the traceback not
+an exception occurs in the callback, and print the traceback not
 only on the console, but also to OutputDebugString() or something
 like that.
 */

View File

@@ -202,7 +202,7 @@ static void _CallPythonObject(void *mem,
 /* XXX XXX XX
 We have the problem that c_byte or c_short have dict->size of
 1 resp. 4, but these parameters are pushed as sizeof(int) bytes.
-BTW, the same problem occurrs when they are pushed as parameters
+BTW, the same problem occurs when they are pushed as parameters
 */
 } else if (dict) {
 /* Hm, shouldn't we use PyCData_AtAddress() or something like that instead? */

View File

@@ -29,7 +29,7 @@
 4. _ctypes_callproc is then called with the 'callargs' tuple. _ctypes_callproc first
 allocates two arrays. The first is an array of 'struct argument' items, the
-second array has 'void *' entried.
+second array has 'void *' entries.
 5. If 'converters' are present (converters is a sequence of argtypes'
 from_param methods), for each item in 'callargs' converter is called and the

View File

@@ -242,7 +242,7 @@ partial_repr(partialobject *pto)
 __reduce__ by itself doesn't support getting kwargs in the unpickle
 operation so we define a __setstate__ that replaces all the information
 about the partial. If we only replaced part of it someone would use
-it as a hook to do stange things.
+it as a hook to do strange things.
 */
 static PyObject *

View File

@@ -50,7 +50,7 @@ PyDoc_STRVAR(iobase_doc,
 "stream.\n"
 "\n"
 "IOBase also supports the :keyword:`with` statement. In this example,\n"
-"fp is closed after the suite of the with statment is complete:\n"
+"fp is closed after the suite of the with statement is complete:\n"
 "\n"
 "with open('spam.txt', 'r') as fp:\n"
 " fp.write('Spam and eggs!')\n");

View File

@@ -157,7 +157,7 @@ write_str(stringio *self, PyObject *obj)
 0 lo string_size hi
 | |<---used--->|<----------available----------->|
 | | <--to pad-->|<---to write---> |
-0 buf positon
+0 buf position
 */
 memset(self->buf + self->string_size, '\0',

View File

@@ -6259,7 +6259,7 @@ initmodule(void)
 goto error;
 if (!PyDict_CheckExact(name_mapping_3to2)) {
 PyErr_Format(PyExc_RuntimeError,
-"_compat_pickle.REVERSE_NAME_MAPPING shouldbe a dict, "
+"_compat_pickle.REVERSE_NAME_MAPPING should be a dict, "
 "not %.200s", Py_TYPE(name_mapping_3to2)->tp_name);
 goto error;
 }

View File

@@ -55,7 +55,7 @@ typedef struct
 /* None for autocommit, otherwise a PyString with the isolation level */
 PyObject* isolation_level;
-/* NULL for autocommit, otherwise a string with the BEGIN statment; will be
+/* NULL for autocommit, otherwise a string with the BEGIN statement; will be
 * freed in connection destructor */
 char* begin_statement;

View File

@@ -23,7 +23,7 @@
 /*
 CM_LARGE_DOUBLE is used to avoid spurious overflow in the sqrt, log,
 inverse trig and inverse hyperbolic trig functions. Its log is used in the
-evaluation of exp, cos, cosh, sin, sinh, tan, and tanh to avoid unecessary
+evaluation of exp, cos, cosh, sin, sinh, tan, and tanh to avoid unnecessary
 overflow.
 */

View File

@@ -2242,7 +2242,7 @@ will allow before refusing new connections.");
 * This is the guts of the recv() and recv_into() methods, which reads into a
 * char buffer. If you have any inc/dec ref to do to the objects that contain
 * the buffer, do it in the caller. This function returns the number of bytes
-* succesfully read. If there was an error, it returns -1. Note that it is
+* successfully read. If there was an error, it returns -1. Note that it is
 * also possible that we return a number of bytes smaller than the request
 * bytes.
 */
@@ -2446,7 +2446,7 @@ See recv() for documentation about the flags.");
 * This is the guts of the recvfrom() and recvfrom_into() methods, which reads
 * into a char buffer. If you have any inc/def ref to do to the objects that
 * contain the buffer, do it in the caller. This function returns the number
-* of bytes succesfully read. If there was an error, it returns -1. Note
+* of bytes successfully read. If there was an error, it returns -1. Note
 * that it is also possible that we return a number of bytes smaller than the
 * request bytes.
 *
@@ -2541,9 +2541,9 @@ sock_recvfrom(PySocketSockObject *s, PyObject *args)
 if (outlen != recvlen) {
 /* We did not read as many bytes as we anticipated, resize the
-string if possible and be succesful. */
+string if possible and be successful. */
 if (_PyBytes_Resize(&buf, outlen) < 0)
-/* Oopsy, not so succesful after all. */
+/* Oopsy, not so successful after all. */
 goto finally;
 }
@@ -4372,7 +4372,7 @@ os_init(void)
 return 0; /* Failure */
 #else
-/* No need to initialise sockets with GCC/EMX */
+/* No need to initialize sockets with GCC/EMX */
 return 1; /* Success */
 #endif
 }
@@ -4406,7 +4406,7 @@ PySocketModule_APIObject PySocketModuleAPI =
 "socket.py" which implements some additional functionality.
 The import of "_socket" may fail with an ImportError exception if
 os-specific initialization fails. On Windows, this does WINSOCK
-initialization. When WINSOCK is initialized succesfully, a call to
+initialization. When WINSOCK is initialized successfully, a call to
 WSACleanup() is scheduled to be made at exit time.
 */

View File

@@ -697,7 +697,7 @@ time_mktime(PyObject *self, PyObject *tup)
 buf.tm_wday = -1; /* sentinel; original value ignored */
 tt = mktime(&buf);
 /* Return value of -1 does not necessarily mean an error, but tm_wday
-* cannot remain set to -1 if mktime succedded. */
+* cannot remain set to -1 if mktime succeeded. */
 if (tt == (time_t)(-1) && buf.tm_wday == -1) {
 PyErr_SetString(PyExc_OverflowError,
 "mktime argument out of range");

View File

@@ -1120,7 +1120,7 @@ parse_dostime(int dostime, int dosdate)
 }
 /* Given a path to a .pyc or .pyo file in the archive, return the
-modifictaion time of the matching .py file, or 0 if no source
+modification time of the matching .py file, or 0 if no source
 is available. */
 static time_t
 get_mtime_of_source(ZipImporter *self, char *path)

View File

@@ -188,7 +188,7 @@ typedef struct internal_state {
 int nice_match; /* Stop searching when current match exceeds this */
 /* used by trees.c: */
-/* Didn't use ct_data typedef below to supress compiler warning */
+/* Didn't use ct_data typedef below to suppress compiler warning */
 struct ct_data_s dyn_ltree[HEAP_SIZE]; /* literal and length tree */
 struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
 struct ct_data_s bl_tree[2*BL_CODES+1]; /* Huffman tree for bit lengths */

View File

@@ -256,7 +256,7 @@ int ZEXPORT gzsetparams (file, level, strategy)
 /* ===========================================================================
 Read a byte from a gz_stream; update next_in and avail_in. Return EOF
 for end of file.
-IN assertion: the stream s has been sucessfully opened for reading.
+IN assertion: the stream s has been successfully opened for reading.
 */
 local int get_byte(s)
 gz_stream *s;
@@ -281,7 +281,7 @@ local int get_byte(s)
 mode to transparent if the gzip magic header is not present; set s->err
 to Z_DATA_ERROR if the magic header is present but the rest of the header
 is incorrect.
-IN assertion: the stream s has already been created sucessfully;
+IN assertion: the stream s has already been created successfully;
 s->stream.avail_in is zero for the first time, but may be non-zero
 for concatenated .gz files.
 */

View File

@@ -2080,7 +2080,7 @@ dict_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 assert(d->ma_table == NULL && d->ma_fill == 0 && d->ma_used == 0);
 INIT_NONZERO_DICT_SLOTS(d);
 d->ma_lookup = lookdict_unicode;
-/* The object has been implicitely tracked by tp_alloc */
+/* The object has been implicitly tracked by tp_alloc */
 if (type == &PyDict_Type)
 _PyObject_GC_UNTRACK(d);
 #ifdef SHOW_CONVERSION_COUNTS

View File

@@ -11,7 +11,7 @@
 /* Ensure ob_item has room for at least newsize elements, and set
 * ob_size to newsize. If newsize > ob_size on entry, the content
 * of the new slots at exit is undefined heap trash; it's the caller's
-* responsiblity to overwrite them with sane values.
+* responsibility to overwrite them with sane values.
 * The number of allocated elements may grow, shrink, or stay the same.
 * Failure is impossible if newsize <= self.allocated on entry, although
 * that partly relies on an assumption that the system realloc() never

View File

@@ -709,7 +709,7 @@ _PyLong_FromByteArray(const unsigned char* bytes, size_t n,
 is_signed = *pendbyte >= 0x80;
 /* Compute numsignificantbytes. This consists of finding the most
-significant byte. Leading 0 bytes are insignficant if the number
+significant byte. Leading 0 bytes are insignificant if the number
 is positive, and leading 0xff bytes if negative. */
 {
 size_t i;

View File

@@ -1008,7 +1008,7 @@ subtype_dealloc(PyObject *self)
 self has a refcount of 0, and if gc ever gets its hands on it
 (which can happen if any weakref callback gets invoked), it
 looks like trash to gc too, and gc also tries to delete self
-then. But we're already deleting self. Double dealloction is
+then. But we're already deleting self. Double deallocation is
 a subtle disaster.
 Q. Why the bizarre (net-zero) manipulation of
@@ -5955,7 +5955,7 @@ recurse_down_subclasses(PyTypeObject *type, PyObject *name,
 slots compete for the same descriptor (for example both sq_item and
 mp_subscript generate a __getitem__ descriptor).
-In the latter case, the first slotdef entry encoutered wins. Since
+In the latter case, the first slotdef entry encountered wins. Since
 slotdef entries are sorted by the offset of the slot in the
 PyHeapTypeObject, this gives us some control over disambiguating
 between competing slots: the members of PyHeapTypeObject are listed

View File

@ -54,7 +54,7 @@ BOOL ensure_directory(char *pathname, char *new_part, NOTIFYPROC notify)
return TRUE; return TRUE;
} }
/* XXX Should better explicitely specify /* XXX Should better explicitly specify
* uncomp_size and file_times instead of pfhdr! * uncomp_size and file_times instead of pfhdr!
*/ */
char *map_new_file(DWORD flags, char *filename, char *map_new_file(DWORD flags, char *filename,
@ -164,7 +164,7 @@ extract_file(char *dst, char *src, int method, int comp_size,
zstream.avail_out = uncomp_size; zstream.avail_out = uncomp_size;
/* Apparently an undocumented feature of zlib: Set windowsize /* Apparently an undocumented feature of zlib: Set windowsize
to negative values to supress the gzip header and be compatible with to negative values to suppress the gzip header and be compatible with
zip! */ zip! */
result = TRUE; result = TRUE;
if (Z_OK != (x = inflateInit2(&zstream, -15))) { if (Z_OK != (x = inflateInit2(&zstream, -15))) {
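
The zlib trick the comment relies on is passing a negative windowBits value to inflateInit2(), which selects raw deflate data with no gzip/zlib header or trailer; that is the form zip archive members are stored in. A hedged, self-contained sketch using the public zlib API, with buffer handling simplified to a single pass:

    #include <string.h>
    #include <zlib.h>

    /* Inflate a raw deflate block that fits in one input/output pass. */
    static int
    inflate_raw(const unsigned char *src, size_t srclen,
                unsigned char *dst, size_t dstlen)
    {
        z_stream zs;
        int err;

        memset(&zs, 0, sizeof(zs));
        zs.next_in   = (Bytef *)src;
        zs.avail_in  = (uInt)srclen;
        zs.next_out  = dst;
        zs.avail_out = (uInt)dstlen;

        if (inflateInit2(&zs, -15) != Z_OK)   /* -15: raw deflate, 32K window */
            return -1;
        err = inflate(&zs, Z_FINISH);
        inflateEnd(&zs);
        return err == Z_STREAM_END ? 0 : -1;
    }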

View File

@ -148,7 +148,7 @@ BOOL pyc_compile, pyo_compile;
the permissions of the current user. */ the permissions of the current user. */
HKEY hkey_root = (HKEY)-1; HKEY hkey_root = (HKEY)-1;
BOOL success; /* Installation successfull? */ BOOL success; /* Installation successful? */
char *failure_reason = NULL; char *failure_reason = NULL;
HANDLE hBitmap; HANDLE hBitmap;
@ -797,7 +797,7 @@ run_installscript(char *pathname, int argc, char **argv, char **pOutput)
tempname = tempnam(NULL, NULL); tempname = tempnam(NULL, NULL);
// We use a static CRT while the Python version we load uses // We use a static CRT while the Python version we load uses
// the CRT from one of various possibile DLLs. As a result we // the CRT from one of various possible DLLs. As a result we
// need to redirect the standard handles using the API rather // need to redirect the standard handles using the API rather
// than the CRT. // than the CRT.
redirected = CreateFile( redirected = CreateFile(

View File

@ -188,7 +188,7 @@ inv_handle:
return NULL; return NULL;
} }
/* free dynamicaly-linked library */ /* free dynamically-linked library */
int dlclose(void *handle) int dlclose(void *handle)
{ {
int rc; int rc;

View File

@ -42,7 +42,7 @@ void *dlopen(char *filename, int flags);
/* return a pointer to the `symbol' in DLL */ /* return a pointer to the `symbol' in DLL */
void *dlsym(void *handle, char *symbol); void *dlsym(void *handle, char *symbol);
/* free dynamicaly-linked library */ /* free dynamically-linked library */
int dlclose(void *handle); int dlclose(void *handle);
/* return a string describing last occurred dl error */ /* return a string describing last occurred dl error */
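
Since the prototypes above mirror the standard POSIX <dlfcn.h> interface that this shim emulates, a usage sketch against the real POSIX calls shows what callers expect; the library name and symbol looked up here are only illustrative:

    #include <stdio.h>
    #include <dlfcn.h>

    int main(void)
    {
        void *handle = dlopen("libm.so", RTLD_NOW);
        if (handle == NULL) {
            fprintf(stderr, "dlopen failed: %s\n", dlerror());
            return 1;
        }
        /* Look up a symbol and call it through a function pointer. */
        double (*cosine)(double) = (double (*)(double))dlsym(handle, "cos");
        if (cosine != NULL)
            printf("cos(0.0) = %f\n", cosine(0.0));
        dlclose(handle);
        return 0;
    }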

View File

@ -26,7 +26,7 @@
typedef unsigned long long uint64; typedef unsigned long long uint64;
/* PowerPC suppport. /* PowerPC support.
"__ppc__" appears to be the preprocessor definition to detect on OS X, whereas "__ppc__" appears to be the preprocessor definition to detect on OS X, whereas
"__powerpc__" appears to be the correct one for Linux with GCC "__powerpc__" appears to be the correct one for Linux with GCC
*/ */
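
Both spellings mentioned above can be folded into a single feature test; a minimal sketch (the macro name is made up for the example):

    #if defined(__ppc__) || defined(__powerpc__)
    #  define ON_POWERPC 1      /* OS X (__ppc__) or Linux/GCC (__powerpc__) */
    #else
    #  define ON_POWERPC 0
    #endif
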
@ -1266,7 +1266,7 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag)
if (_Py_atomic_load_relaxed(&eval_breaker)) { if (_Py_atomic_load_relaxed(&eval_breaker)) {
if (*next_instr == SETUP_FINALLY) { if (*next_instr == SETUP_FINALLY) {
/* Make the last opcode before /* Make the last opcode before
a try: finally: block uninterruptable. */ a try: finally: block uninterruptible. */
goto fast_next_opcode; goto fast_next_opcode;
} }
tstate->tick_counter++; tstate->tick_counter++;

View File

@ -512,7 +512,7 @@ _PyThread_CurrentFrames(void)
/* for i in all interpreters: /* for i in all interpreters:
* for t in all of i's thread states: * for t in all of i's thread states:
* if t's frame isn't NULL, map t's id to its frame * if t's frame isn't NULL, map t's id to its frame
* Because these lists can mutute even when the GIL is held, we * Because these lists can mutate even when the GIL is held, we
* need to grab head_mutex for the duration. * need to grab head_mutex for the duration.
*/ */
HEAD_LOCK(); HEAD_LOCK();
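
The pseudocode above maps onto the public interpreter/thread-state accessors roughly as follows. This is a hedged sketch, not the function itself: error handling is compressed, and the head_mutex locking the comment insists on is left out because it is an internal detail:

    #include <Python.h>

    static int
    map_thread_ids_to_frames(PyObject *dict)
    {
        PyInterpreterState *i;
        PyThreadState *t;

        for (i = PyInterpreterState_Head(); i != NULL; i = PyInterpreterState_Next(i)) {
            for (t = PyInterpreterState_ThreadHead(i); t != NULL; t = PyThreadState_Next(t)) {
                if (t->frame == NULL)
                    continue;
                PyObject *id = PyLong_FromLong(t->thread_id);
                if (id == NULL ||
                    PyDict_SetItem(dict, id, (PyObject *)t->frame) < 0) {
                    Py_XDECREF(id);
                    return -1;
                }
                Py_DECREF(id);
            }
        }
        return 0;
    }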

View File

@ -40,7 +40,7 @@
#endif #endif
/* Check if we're running on HP-UX and _SC_THREADS is defined. If so, then /* Check if we're running on HP-UX and _SC_THREADS is defined. If so, then
enough of the Posix threads package is implimented to support python enough of the Posix threads package is implemented to support python
threads. threads.
This is valid for HP-UX 11.23 running on an ia64 system. If needed, add This is valid for HP-UX 11.23 running on an ia64 system. If needed, add
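
The probe the comment describes amounts to a compile-time check on the platform, optionally confirmed at run time through sysconf(). A hedged sketch of that pattern, not the code in thread.c:

    #include <unistd.h>

    /* Nonzero when this HP-UX build advertises POSIX threads support. */
    static int
    posix_threads_available(void)
    {
    #if defined(__hpux) && defined(_SC_THREADS)
        return sysconf(_SC_THREADS) > 0;
    #else
        return 0;
    #endif
    }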

View File

@ -7,7 +7,7 @@ I dont consider it worth parsing the MSVC makefiles for compiler options. Even
we get it just right, a specific freeze application may have specific compiler we get it just right, a specific freeze application may have specific compiler
options anyway (eg, to enable or disable specific functionality) options anyway (eg, to enable or disable specific functionality)
So my basic stragtegy is: So my basic strategy is:
* Have some Windows INI files which "describe" one or more extension modules. * Have some Windows INI files which "describe" one or more extension modules.
(Freeze comes with a default one for all known modules - but you can specify (Freeze comes with a default one for all known modules - but you can specify

View File

@ -188,7 +188,7 @@ def fix(filename):
except os.error as msg: except os.error as msg:
err(filename + ': rename failed (' + str(msg) + ')\n') err(filename + ': rename failed (' + str(msg) + ')\n')
return 1 return 1
# Return succes # Return success
return 0 return 0
# Tokenizing ANSI C (partly) # Tokenizing ANSI C (partly)

View File

@ -1002,7 +1002,7 @@ class Hash:
poly = size + poly poly = size + poly
break break
else: else:
raise AssertionError("ran out of polynominals") raise AssertionError("ran out of polynomials")
print(size, "slots in hash table") print(size, "slots in hash table")

View File

@ -1017,8 +1017,8 @@ class PyBuildExt(build_ext):
if sys.platform == 'darwin': if sys.platform == 'darwin':
# In every directory on the search path search for a dynamic # In every directory on the search path search for a dynamic
# library and then a static library, instead of first looking # library and then a static library, instead of first looking
# for dynamic libraries on the entiry path. # for dynamic libraries on the entire path.
# This way a staticly linked custom sqlite gets picked up # This way a statically linked custom sqlite gets picked up
# before the dynamic library in /usr/lib. # before the dynamic library in /usr/lib.
sqlite_extra_link_args = ('-Wl,-search_paths_first',) sqlite_extra_link_args = ('-Wl,-search_paths_first',)
else: else: