Merged revisions 66394,66404,66412,66414,66424-66436 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/trunk

........
  r66394 | benjamin.peterson | 2008-09-11 17:04:02 -0500 (Thu, 11 Sep 2008) | 1 line

  fix typo
........
  r66404 | gerhard.haering | 2008-09-12 08:54:06 -0500 (Fri, 12 Sep 2008) | 2 lines

  sqlite3 module: Mark iterdump() method as "Non-standard" like all the other methods not found in DB-API.
........
  r66412 | gerhard.haering | 2008-09-12 13:58:57 -0500 (Fri, 12 Sep 2008) | 2 lines

  Fixes issue #3103. In the sqlite3 module, made one more function static. All remaining public symbols now have the pysqlite prefix to avoid name clashes. This at least once created problems where the same symbol name appeared somewhere in Apache and the sqlite3 module was used from mod_python.
........
  r66414 | gerhard.haering | 2008-09-12 17:33:22 -0500 (Fri, 12 Sep 2008) | 2 lines

  Issue #3846: Release GIL during calls to sqlite3_prepare. This improves concurrent access to the same database file from multiple threads/processes.
........
  r66424 | andrew.kuchling | 2008-09-12 20:22:08 -0500 (Fri, 12 Sep 2008) | 1 line

  #687648 from Robert Schuppenies: use classic division. (RM Barry gave permission to update the demos.)
........
  r66425 | andrew.kuchling | 2008-09-12 20:27:33 -0500 (Fri, 12 Sep 2008) | 1 line

  #687648 from Robert Schuppenies: use classic division. From me: don't use string exception; flush stdout after printing
........
  r66426 | andrew.kuchling | 2008-09-12 20:34:41 -0500 (Fri, 12 Sep 2008) | 1 line

  #687648 from Robert Schuppenies: use classic division. From me: don't use string exception; add __main__ section
........
  r66427 | andrew.kuchling | 2008-09-12 20:42:55 -0500 (Fri, 12 Sep 2008) | 1 line

  #687648 from Robert Schuppenies: use classic division. From me: remove two stray semicolons
........
  r66428 | andrew.kuchling | 2008-09-12 20:43:28 -0500 (Fri, 12 Sep 2008) | 1 line

  #687648 from Robert Schuppenies: use classic division.
........
  r66429 | andrew.kuchling | 2008-09-12 20:47:02 -0500 (Fri, 12 Sep 2008) | 1 line

  Remove semicolon
........
  r66430 | andrew.kuchling | 2008-09-12 20:48:36 -0500 (Fri, 12 Sep 2008) | 1 line

  Subclass exception
........
  r66431 | andrew.kuchling | 2008-09-12 20:56:56 -0500 (Fri, 12 Sep 2008) | 1 line

  Fix SyntaxError
........
  r66432 | andrew.kuchling | 2008-09-12 20:57:25 -0500 (Fri, 12 Sep 2008) | 1 line

  Update uses of string exceptions
........
  r66433 | andrew.kuchling | 2008-09-12 21:08:30 -0500 (Fri, 12 Sep 2008) | 1 line

  Use title case
........
  r66434 | andrew.kuchling | 2008-09-12 21:09:15 -0500 (Fri, 12 Sep 2008) | 1 line

  Remove extra 'the'; the following title includes it
........
  r66435 | andrew.kuchling | 2008-09-12 21:11:51 -0500 (Fri, 12 Sep 2008) | 1 line

  #3288: Document as_integer_ratio
........
  r66436 | andrew.kuchling | 2008-09-12 21:14:15 -0500 (Fri, 12 Sep 2008) | 1 line

  Use title case
........
parent e40a21376a
commit d7b032841a

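Most of the demo hunks below make two mechanical changes for Python 3: integer "/" becomes "//" (true division now always yields a float), and string exceptions become Exception subclasses. A minimal sketch of both idioms, with made-up values:

    # Floor vs. true division (the reason for the / -> // changes below).
    year = 1999
    print(year / 4)     # 499.75 -- true division always returns a float
    print(year // 4)    # 499    -- what the old int/int division produced

    # String exceptions are gone; exceptions must derive from BaseException.
    class DateTestError(Exception):
        pass

    try:
        raise DateTestError('num->date failed', 1999)
    except DateTestError as exc:
        print(exc.args)  # ('num->date failed', 1999)
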
@@ -68,7 +68,7 @@ def _days_in_year(year): # number of days in year
return 365 + _is_leap(year)

def _days_before_year(year): # number of days before year
-return year*365 + (year+3)/4 - (year+99)/100 + (year+399)/400
+return year*365 + (year+3)//4 - (year+99)//100 + (year+399)//400

def _days_in_month(month, year): # number of days in month of year
if month == 2 and _is_leap(year): return 29

@@ -92,9 +92,9 @@ def _num2date(n): # return date with ordinal n
del ans.ord, ans.month, ans.day, ans.year # un-initialize it
ans.ord = n

-n400 = (n-1)/_DI400Y # # of 400-year blocks preceding
+n400 = (n-1)//_DI400Y # # of 400-year blocks preceding
year, n = 400 * n400, n - _DI400Y * n400
-more = n / 365
+more = n // 365
dby = _days_before_year(more)
if dby >= n:
more = more - 1

@@ -104,7 +104,7 @@ def _num2date(n): # return date with ordinal n
try: year = int(year) # chop to int, if it fits
except (ValueError, OverflowError): pass

-month = min(n/29 + 1, 12)
+month = min(n//29 + 1, 12)
dbm = _days_before_month(month, year)
if dbm >= n:
month = month - 1

@@ -174,7 +174,9 @@ def today():
local = time.localtime(time.time())
return Date(local[1], local[2], local[0])

-DateTestError = 'DateTestError'
+class DateTestError(Exception):
+pass
+
def test(firstyear, lastyear):
a = Date(9,30,1913)
b = Date(9,30,1914)

@@ -220,3 +222,6 @@ def test(firstyear, lastyear):
(fd.month,fd.day,fd.year,ld.month,ld.day,ld.year):
raise DateTestError('num->date failed', y)
y = y + 1
+
+if __name__ == '__main__':
+test(1850, 2150)

@@ -6,7 +6,8 @@

import sys; rprt = sys.stderr.write #for debugging

-error = 'bitvec.error'
+class error(Exception):
+pass


def _check_value(value):

@@ -32,7 +32,7 @@ def MDTimeTrial():

filsiz = 1 << 8
filler = makestr(0, filsiz-1)
-data = filler * (TEST_BLOCK_SIZE / filsiz);
+data = filler * (TEST_BLOCK_SIZE // filsiz)
data = data + filler[:(TEST_BLOCK_SIZE % filsiz)]

del filsiz, filler

@@ -62,7 +62,7 @@ def MDString(str):


def MDFile(filename):
-f = open(filename, 'rb');
+f = open(filename, 'rb')
mdContext = md5.new()

while 1:

@@ -202,7 +202,7 @@ def recvfile_real(local, remote, name):
dt = t2-t1
print(size, "bytes in", round(dt), "seconds", end=' ')
if dt:
-print("i.e.", int(size/dt), "bytes/sec", end=' ')
+print("i.e.", int(size//dt), "bytes/sec", end=' ')
print()
remote._recv(id) # ignored

@@ -194,7 +194,8 @@ def test():
fh = sf[1]
if fh:
ncl = NFSClient(host)
-print(ncl.Getattr(fh))
+attrstat = ncl.Getattr(fh)
+print(attrstat)
list = ncl.Listdir(fh)
for item in list: print(item)
mcl.Umnt(filesys)

@@ -80,9 +80,9 @@ class Packer(xdr.Packer):


# Exceptions
-BadRPCFormat = 'rpc.BadRPCFormat'
-BadRPCVersion = 'rpc.BadRPCVersion'
-GarbageArgs = 'rpc.GarbageArgs'
+class BadRPCFormat(Exception): pass
+class BadRPCVersion(Exception): pass
+class GarbageArgs(Exception): pass

class Unpacker(xdr.Unpacker):

@@ -57,7 +57,7 @@ class Packer:
def pack_fstring(self, n, s):
if n < 0:
raise ValueError('fstring size must be nonnegative')
-n = ((n+3)/4)*4
+n = ((n + 3)//4)*4
data = s[:n]
data = data + (n - len(data)) * '\0'
self.buf = self.buf + data

@@ -164,7 +164,7 @@ class Unpacker:
if n < 0:
raise ValueError('fstring size must be nonnegative')
i = self.pos
-j = i + (n+3)/4*4
+j = i + (n+3)//4*4
if j > len(self.buf):
raise EOFError
self.pos = j

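The xdr hunks above keep the pad-to-a-4-byte-boundary computation in integer arithmetic. A minimal sketch of the idiom, outside the module:

    # XDR fixed-length strings are padded to a multiple of 4 bytes;
    # ((n + 3) // 4) * 4 rounds n up to the next 4-byte boundary.
    for n in (0, 1, 4, 5, 9):
        print(n, '->', ((n + 3) // 4) * 4)   # 0->0, 1->4, 4->4, 5->8, 9->12
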
@@ -8,23 +8,21 @@
import sys
from math import sqrt

-error = 'fact.error' # exception
-
def fact(n):
-if n < 1: raise error # fact() argument should be >= 1
+if n < 1: raise ValueError # fact() argument should be >= 1
if n == 1: return [] # special case
res = []
# Treat even factors special, so we can use i = i+2 later
while n%2 == 0:
res.append(2)
-n = n/2
+n = n//2
# Try odd numbers up to sqrt(n)
limit = sqrt(float(n+1))
i = 3
while i <= limit:
if n%i == 0:
res.append(i)
-n = n/i
+n = n//i
limit = sqrt(n+1)
else:
i = i+2

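The fact() demo above is plain trial division with the quotient kept integral by //. A self-contained sketch of the same idea (not the demo's exact code; math.isqrt needs Python 3.8+):

    from math import isqrt

    def trial_division(n):
        """Return the prime factors of n (n >= 2) in non-decreasing order."""
        factors = []
        while n % 2 == 0:
            factors.append(2)
            n //= 2                  # floor division keeps n an int
        i = 3
        while i <= isqrt(n):
            while n % i == 0:
                factors.append(i)
                n //= i
            i += 2
        if n > 1:                    # whatever remains is prime
            factors.append(n)
        return factors

    print(trial_division(360))       # [2, 2, 2, 3, 3, 5]
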
@@ -104,7 +104,7 @@ def main():

def showbar(dict, title):
n = len(title)
-print('='*((70-n)/2), title, '='*((71-n)/2))
+print('='*((70-n)//2), title, '='*((71-n)//2))
list = []
for key in sorted(dict.keys()):
n = len(str(key))

@@ -124,7 +124,7 @@ def show(dict, title, maxitems):
if len(dict) > maxitems:
title = title + ' (first %d)'%maxitems
n = len(title)
-print('='*((70-n)/2), title, '='*((71-n)/2))
+print('='*((70-n)//2), title, '='*((71-n)//2))
list = []
for key in dict.keys():
list.append((-len(dict[key]), key))

@@ -83,7 +83,7 @@ def makestatus(name, thisuser):
lines.append(line)
#
if totaljobs:
-line = '%d K' % ((totalbytes+1023)/1024)
+line = '%d K' % ((totalbytes+1023)//1024)
if totaljobs != len(users):
line = line + ' (%d jobs)' % totaljobs
if len(users) == 1:

@@ -95,7 +95,7 @@ def makestatus(name, thisuser):
line = line + ' (%s first)' % thisuser
else:
line = line + ' (%d K before %s)' % (
-(aheadbytes+1023)/1024, thisuser)
+(aheadbytes+1023)//1024, thisuser)
lines.append(line)
#
sts = pipe.close()

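The printer-status hunks above use (bytes + 1023) // 1024 to round a byte count up to whole kilobytes; a quick check of the idiom:

    for nbytes in (0, 1, 1024, 1025):
        print(nbytes, 'bytes ->', (nbytes + 1023) // 1024, 'K')   # 0, 1, 1, 2
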
@@ -110,7 +110,7 @@ def test():
def tuple(list):
if len(list) == 0: return ()
if len(list) == 1: return (list[0],)
-i = len(list)/2
+i = len(list)//2
return tuple(list[:i]) + tuple(list[i:])

if __name__ == "__main__":

@@ -320,7 +320,7 @@ def main():
tree={}

# Check that the output directory exists
-checkopdir(pagedir);
+checkopdir(pagedir)

try:
print('Connecting to '+newshost+'...')

@@ -17,11 +17,11 @@ def main():
p, q, k = k*k, 2*k+1, k+1
a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1
# Print common digits
-d, d1 = a/b, a1/b1
+d, d1 = a//b, a1//b1
while d == d1:
output(d)
a, a1 = 10*(a%b), 10*(a1%b1)
-d, d1 = a/b, a1/b1
+d, d1 = a//b, a1//b1

def output(d):
# Use write() to avoid spaces between the digits

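In the pi-digit hunk just above, the numerators and denominators grow without bound, so digit extraction has to stay in exact integer arithmetic; a//b keeps it exact, while a/b would round through a 53-bit float. A small illustration with an arbitrary large value:

    a, b = 10**40 + 7, 3
    print(a // b)        # exact 40-digit quotient
    print(int(a / b))    # float division is only good to ~16 significant digits here
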
@@ -99,9 +99,9 @@ def mkdate(xxx_todo_changeme1):
# was different then...
(year, month, day) = xxx_todo_changeme1
days = year*365 # years, roughly
-days = days + (year+3)/4 # plus leap years, roughly
-days = days - (year+99)/100 # minus non-leap years every century
-days = days + (year+399)/400 # plus leap years every 4 centirues
+days = days + (year+3)//4 # plus leap years, roughly
+days = days - (year+99)//100 # minus non-leap years every century
+days = days + (year+399)//400 # plus leap years every 4 centirues
for i in range(1, month):
if i == 2 and calendar.isleap(year):
days = days + 29

@@ -91,7 +91,7 @@ def sendportcmd(s, f, port):
hostname = gethostname()
hostaddr = gethostbyname(hostname)
hbytes = string.splitfields(hostaddr, '.')
-pbytes = [repr(port/256), repr(port%256)]
+pbytes = [repr(port//256), repr(port%256)]
bytes = hbytes + pbytes
cmd = 'PORT ' + string.joinfields(bytes, ',')
s.send(cmd + '\r\n')

@@ -93,8 +93,8 @@ class _CoEvent:
self.e.wait()
self.e.clear()

-Killed = 'Coroutine.Killed'
-EarlyExit = 'Coroutine.EarlyExit'
+class Killed(Exception): pass
+class EarlyExit(Exception): pass

class Coroutine:
def __init__(self):

@@ -1,8 +1,10 @@
# Generator implementation using threads

import _thread as thread
+import sys

-Killed = 'Generator.Killed'
+class Killed(Exception):
+pass

class Generator:
# Constructor

@@ -16,6 +18,7 @@ class Generator:
self.done = 0
self.killed = 0
thread.start_new_thread(self._start, ())

# Internal routine
def _start(self):
try:

@@ -29,6 +32,7 @@ class Generator:
if not self.killed:
self.done = 1
self.getlock.release()

# Called by producer for each value; raise Killed if no more needed
def put(self, value):
if self.killed:

@@ -38,6 +42,7 @@ class Generator:
self.putlock.acquire() # Wait for next get() call
if self.killed:
raise Killed

# Called by producer to get next value; raise EOFError if no more
def get(self):
if self.killed:

@@ -47,12 +52,14 @@ class Generator:
if self.done:
raise EOFError # Say there are no more values
return self.value

# Called by consumer if no more values wanted
def kill(self):
if self.killed:
raise TypeError('kill() called on killed generator')
self.killed = 1
self.putlock.release()

# Clone constructor
def clone(self):
return Generator(self.func, self.args)

@@ -64,11 +71,11 @@ def pi(g):
p, q, k = k*k, 2*k+1, k+1
a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1
# Print common digits
-d, d1 = a/b, a1/b1
+d, d1 = a//b, a1//b1
while d == d1:
g.put(int(d))
a, a1 = 10*(a%b), 10*(a1%b1)
-d, d1 = a/b, a1/b1
+d, d1 = a//b, a1//b1

def test():
g = Generator(pi, ())

@@ -80,5 +87,6 @@ def test():
g.kill()
while 1:
print(h.get(), end=' ')
+sys.stdout.flush()

test()

@@ -35,15 +35,15 @@ class Tkhanoi:

# Add background bitmap
if bitmap:
-self.bitmap = c.create_bitmap(width/2, height/2,
+self.bitmap = c.create_bitmap(width//2, height//2,
bitmap=bitmap,
foreground='blue')

# Generate pegs
pegwidth = 10
-pegheight = height/2
-pegdist = width/3
-x1, y1 = (pegdist-pegwidth)/2, height*1/3
+pegheight = height//2
+pegdist = width//3
+x1, y1 = (pegdist-pegwidth)//2, height*1//3
x2, y2 = x1+pegwidth, y1+pegheight
self.pegs = []
p = c.create_rectangle(x1, y1, x2, y2, fill='black')

@@ -57,14 +57,14 @@ class Tkhanoi:
self.tk.update()

# Generate pieces
-pieceheight = pegheight/16
-maxpiecewidth = pegdist*2/3
+pieceheight = pegheight//16
+maxpiecewidth = pegdist*2//3
minpiecewidth = 2*pegwidth
self.pegstate = [[], [], []]
self.pieces = {}
-x1, y1 = (pegdist-maxpiecewidth)/2, y2-pieceheight-2
+x1, y1 = (pegdist-maxpiecewidth)//2, y2-pieceheight-2
x2, y2 = x1+maxpiecewidth, y1+pieceheight
-dx = (maxpiecewidth-minpiecewidth) / (2*max(1, n-1))
+dx = (maxpiecewidth-minpiecewidth) // (2*max(1, n-1))
for i in range(n, 0, -1):
p = c.create_rectangle(x1, y1, x2, y2, fill='red')
self.pieces[i] = p

@@ -101,10 +101,10 @@ class Tkhanoi:

# Move it towards peg b
bx1, by1, bx2, by2 = c.bbox(self.pegs[b])
-newcenter = (bx1+bx2)/2
+newcenter = (bx1+bx2)//2
while 1:
x1, y1, x2, y2 = c.bbox(p)
-center = (x1+x2)/2
+center = (x1+x2)//2
if center == newcenter: break
if center > newcenter: c.move(p, -1, 0)
else: c.move(p, 1, 0)

@@ -168,7 +168,7 @@ class Card:
self.group = Group(canvas)

text = "%s %s" % (VALNAMES[value], suit)
-self.__text = CanvasText(canvas, CARDWIDTH/2, 0,
+self.__text = CanvasText(canvas, CARDWIDTH//2, 0,
anchor=N, fill=self.color, text=text)
self.group.addtag_withtag(self.__text)

@@ -589,7 +589,7 @@ class Solitaire:

def animatedmoveto(self, card, dest):
for i in range(10, 0, -1):
-dx, dy = (dest.x-card.x)/i, (dest.y-card.y)/i
+dx, dy = (dest.x-card.x)//i, (dest.y-card.y)//i
card.moveby(dx, dy)
self.master.update_idletasks()

@@ -88,7 +88,7 @@ class Array:
if self.speed == "fastest":
msecs = 0
elif self.speed == "fast":
-msecs = msecs/10
+msecs = msecs//10
elif self.speed == "single-step":
msecs = 1000000000
if not self.stop_mainloop:

@@ -320,7 +320,7 @@ class ArrayItem:
return outcome

def position(self):
-x1 = (self.index+1)*XGRID - WIDTH/2
+x1 = (self.index+1)*XGRID - WIDTH//2
x2 = x1+WIDTH
y2 = (self.array.maxvalue+1)*YGRID
y1 = y2 - (self.value)*YGRID

@@ -349,7 +349,7 @@ def interpolate(oldpts, newpts, n):
res = [tuple(oldpts)]
for i in range(1, n):
for k in range(len(pts)):
-pts[k] = oldpts[k] + (newpts[k] - oldpts[k])*i/n
+pts[k] = oldpts[k] + (newpts[k] - oldpts[k])*i//n
res.append(tuple(pts))
res.append(tuple(newpts))
return res

@@ -359,7 +359,7 @@ def interpolate(oldpts, newpts, n):

def uniform(array):
size = array.getsize()
-array.setdata([(size+1)/2] * size)
+array.setdata([(size+1)//2] * size)
array.reset("Uniform data, size %d" % size)

def distinct(array):

@@ -429,7 +429,7 @@ def quicksort(array):
j = j-1
continue
array.message("Choosing pivot")
-j, i, k = first, (first+last)/2, last-1
+j, i, k = first, (first+last)//2, last-1
if array.compare(k, i) < 0:
array.swap(k, i)
if array.compare(k, j) < 0:

@@ -7,7 +7,7 @@
:Release: |version|
:Date: |today|

-While the :ref:`reference-index` describes the exact syntax and
+While :ref:`reference-index` describes the exact syntax and
semantics of the Python language, this library reference manual
describes the standard library that is distributed with Python. It also
describes some of the optional components that are commonly included

@@ -424,7 +424,18 @@ Notes:
Additional Methods on Float
---------------------------

-The float type has some additional methods to support conversion to
+The float type has some additional methods.
+
+.. method:: float.as_integer_ratio()
+
+Return a pair of integers whose ratio is exactly equal to the
+original float and with a positive denominator. Raises
+:exc:`OverflowError` on infinities and a :exc:`ValueError` on
+NaNs.
+
+.. versionadded:: 2.6
+
+Two methods support conversion to
and from hexadecimal strings. Since Python's floats are stored
internally as binary numbers, converting a float to or from a
*decimal* string usually involves a small rounding error. In

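A quick interactive check of the behaviour the new as_integer_ratio() entry documents:

    print((0.25).as_integer_ratio())   # (1, 4)
    print((2.5).as_integer_ratio())    # (5, 2)
    try:
        float('inf').as_integer_ratio()
    except OverflowError:
        print('infinities raise OverflowError')
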
@@ -348,9 +348,9 @@ following:
| | positive numbers, and a minus sign on negative numbers. |
+---------+----------------------------------------------------------+

-The ``'#'`` option is only valid for integers, and only for binary,
-octal, or hexadecimal output. If present, it specifies that the output
-will be prefixed by ``'0b'``, ``'0o'``, or ``'0x'``, respectively.
+The ``'#'`` option is only valid for integers, and only for binary, octal, or
+hexadecimal output. If present, it specifies that the output will be prefixed
+by ``'0b'``, ``'0o'``, or ``'0x'``, respectively.

*width* is a decimal integer defining the minimum field width. If not
specified, then the field width will be determined by the content.

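The alternate-form option that this paragraph describes, exercised from Python:

    print(format(255, '#x'))    # 0xff
    print(format(255, '#o'))    # 0o377
    print(format(255, '#b'))    # 0b11111111
    print(format(255, '10x'))   # minimum field width of 10, no prefix
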
@@ -1,7 +1,7 @@
.. _reference-index:

#################################
-The Python language reference
+The Python Language Reference
#################################

:Release: |version|

@@ -1,7 +1,7 @@
.. _tutorial-index:

######################
-The Python tutorial
+The Python Tutorial
######################

:Release: |version|

@@ -38,7 +38,7 @@
static int pysqlite_connection_set_isolation_level(pysqlite_Connection* self, PyObject* isolation_level);


-void _sqlite3_result_error(sqlite3_context* ctx, const char* errmsg, int len)
+static void _sqlite3_result_error(sqlite3_context* ctx, const char* errmsg, int len)
{
/* in older SQLite versions, calling sqlite3_result_error in callbacks
* triggers a bug in SQLite that leads either to irritating results or

@@ -304,7 +304,7 @@ PyObject* _pysqlite_connection_begin(pysqlite_Connection* self)
goto error;
}

-rc = _sqlite_step_with_busyhandler(statement, self);
+rc = pysqlite_step(statement, self);
if (rc == SQLITE_DONE) {
self->inTransaction = 1;
} else {

@@ -347,7 +347,7 @@ PyObject* pysqlite_connection_commit(pysqlite_Connection* self, PyObject* args)
goto error;
}

-rc = _sqlite_step_with_busyhandler(statement, self);
+rc = pysqlite_step(statement, self);
if (rc == SQLITE_DONE) {
self->inTransaction = 0;
} else {

@@ -393,7 +393,7 @@ PyObject* pysqlite_connection_rollback(pysqlite_Connection* self, PyObject* args
goto error;
}

-rc = _sqlite_step_with_busyhandler(statement, self);
+rc = pysqlite_step(statement, self);
if (rc == SQLITE_DONE) {
self->inTransaction = 0;
} else {

@@ -1316,8 +1316,7 @@ static PyMethodDef connection_methods[] = {
{"interrupt", (PyCFunction)pysqlite_connection_interrupt, METH_NOARGS,
PyDoc_STR("Abort any pending database operation. Non-standard.")},
{"iterdump", (PyCFunction)pysqlite_connection_iterdump, METH_NOARGS,
-PyDoc_STR("Returns iterator to the dump of the database in an SQL text"
-"format.")},
+PyDoc_STR("Returns iterator to the dump of the database in an SQL text format. Non-standard.")},
{"__enter__", (PyCFunction)pysqlite_connection_enter, METH_NOARGS,
PyDoc_STR("For context manager. Non-standard.")},
{"__exit__", (PyCFunction)pysqlite_connection_exit, METH_VARARGS,

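For reference, the iterdump() method whose docstring is adjusted above, used from Python (a sketch; the exact dump text may differ slightly):

    import sqlite3

    con = sqlite3.connect(':memory:')
    con.execute('CREATE TABLE t (x)')
    con.execute('INSERT INTO t VALUES (1)')
    for line in con.iterdump():          # yields the database as SQL text
        print(line)                      # BEGIN TRANSACTION; / CREATE TABLE ... / COMMIT;
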
@@ -631,7 +631,7 @@ PyObject* _pysqlite_query_execute(pysqlite_Cursor* self, int multiple, PyObject*
/* Keep trying the SQL statement until the schema stops changing. */
while (1) {
/* Actually execute the SQL statement. */
-rc = _sqlite_step_with_busyhandler(self->statement->st, self->connection);
+rc = pysqlite_step(self->statement->st, self->connection);
if (rc == SQLITE_DONE || rc == SQLITE_ROW) {
/* If it worked, let's get out of the loop */
break;

@@ -815,11 +815,13 @@ PyObject* pysqlite_cursor_executescript(pysqlite_Cursor* self, PyObject* args)
}
statement_completed = 1;

+Py_BEGIN_ALLOW_THREADS
rc = sqlite3_prepare(self->connection->db,
script_cstr,
-1,
&statement,
&script_cstr);
+Py_END_ALLOW_THREADS
if (rc != SQLITE_OK) {
_pysqlite_seterror(self->connection->db, NULL);
goto error;

@@ -828,7 +830,7 @@ PyObject* pysqlite_cursor_executescript(pysqlite_Cursor* self, PyObject* args)
/* execute statement, and ignore results of SELECT statements */
rc = SQLITE_ROW;
while (rc == SQLITE_ROW) {
-rc = _sqlite_step_with_busyhandler(statement, self->connection);
+rc = pysqlite_step(statement, self->connection);
/* TODO: we probably need more error handling here */
}

@@ -896,7 +898,7 @@ PyObject* pysqlite_cursor_iternext(pysqlite_Cursor *self)
}

if (self->statement) {
-rc = _sqlite_step_with_busyhandler(self->statement->st, self->connection);
+rc = pysqlite_step(self->statement->st, self->connection);
if (rc != SQLITE_DONE && rc != SQLITE_ROW) {
(void)pysqlite_statement_reset(self->statement);
Py_DECREF(next_row);

@@ -35,10 +35,10 @@

PyObject *psyco_adapters;

-/* microprotocols_init - initialize the adapters dictionary */
+/* pysqlite_microprotocols_init - initialize the adapters dictionary */

int
-microprotocols_init(PyObject *dict)
+pysqlite_microprotocols_init(PyObject *dict)
{
/* create adapters dictionary and put it in module namespace */
if ((psyco_adapters = PyDict_New()) == NULL) {

@@ -49,10 +49,10 @@ microprotocols_init(PyObject *dict)
}


-/* microprotocols_add - add a reverse type-caster to the dictionary */
+/* pysqlite_microprotocols_add - add a reverse type-caster to the dictionary */

int
-microprotocols_add(PyTypeObject *type, PyObject *proto, PyObject *cast)
+pysqlite_microprotocols_add(PyTypeObject *type, PyObject *proto, PyObject *cast)
{
PyObject* key;
int rc;

@@ -70,10 +70,10 @@ microprotocols_add(PyTypeObject *type, PyObject *proto, PyObject *cast)
return rc;
}

-/* microprotocols_adapt - adapt an object to the built-in protocol */
+/* pysqlite_microprotocols_adapt - adapt an object to the built-in protocol */

PyObject *
-microprotocols_adapt(PyObject *obj, PyObject *proto, PyObject *alt)
+pysqlite_microprotocols_adapt(PyObject *obj, PyObject *proto, PyObject *alt)
{
PyObject *adapter, *key;

@@ -132,11 +132,11 @@ microprotocols_adapt(PyObject *obj, PyObject *proto, PyObject *alt)
/** module-level functions **/

PyObject *
-psyco_microprotocols_adapt(pysqlite_Cursor *self, PyObject *args)
+pysqlite_adapt(pysqlite_Cursor *self, PyObject *args)
{
PyObject *obj, *alt = NULL;
PyObject *proto = (PyObject*)&pysqlite_PrepareProtocolType;

if (!PyArg_ParseTuple(args, "O|OO", &obj, &proto, &alt)) return NULL;
-return microprotocols_adapt(obj, proto, alt);
+return pysqlite_microprotocols_adapt(obj, proto, alt);
}

@@ -41,15 +41,15 @@ extern PyObject *psyco_adapters;
/** exported functions **/

/* used by module.c to init the microprotocols system */
-extern int microprotocols_init(PyObject *dict);
-extern int microprotocols_add(
+extern int pysqlite_microprotocols_init(PyObject *dict);
+extern int pysqlite_microprotocols_add(
PyTypeObject *type, PyObject *proto, PyObject *cast);
-extern PyObject *microprotocols_adapt(
+extern PyObject *pysqlite_microprotocols_adapt(
PyObject *obj, PyObject *proto, PyObject *alt);

extern PyObject *
-psyco_microprotocols_adapt(pysqlite_Cursor* self, PyObject *args);
-#define psyco_microprotocols_adapt_doc \
+pysqlite_adapt(pysqlite_Cursor* self, PyObject *args);
+#define pysqlite_adapt_doc \
"adapt(obj, protocol, alternate) -> adapt obj to given protocol. Non-standard."

#endif /* !defined(PSYCOPG_MICROPROTOCOLS_H) */

@@ -160,7 +160,7 @@ static PyObject* module_register_adapter(PyObject* self, PyObject* args)
pysqlite_BaseTypeAdapted = 1;
}

-rc = microprotocols_add(type, (PyObject*)&pysqlite_PrepareProtocolType, caster);
+rc = pysqlite_microprotocols_add(type, (PyObject*)&pysqlite_PrepareProtocolType, caster);
if (rc == -1)
return NULL;

@@ -244,8 +244,8 @@ static PyMethodDef module_methods[] = {
METH_VARARGS, module_register_adapter_doc},
{"register_converter", (PyCFunction)module_register_converter,
METH_VARARGS, module_register_converter_doc},
-{"adapt", (PyCFunction)psyco_microprotocols_adapt, METH_VARARGS,
-psyco_microprotocols_adapt_doc},
+{"adapt", (PyCFunction)pysqlite_adapt, METH_VARARGS,
+pysqlite_adapt_doc},
{"enable_callback_tracebacks", (PyCFunction)enable_callback_tracebacks,
METH_VARARGS, enable_callback_tracebacks_doc},
{NULL, NULL}

@@ -437,7 +437,7 @@ PyMODINIT_FUNC PyInit__sqlite3(void)
Py_DECREF(tmp_obj);

/* initialize microprotocols layer */
-microprotocols_init(dict);
+pysqlite_microprotocols_init(dict);

/* initialize the default converters */
converters_init(dict);

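These are the C entry points behind sqlite3.adapt() and sqlite3.register_adapter(); only the internal symbol names change, the Python-level API stays the same. A minimal usage sketch:

    import sqlite3

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

    def adapt_point(p):
        return '%f;%f' % (p.x, p.y)

    sqlite3.register_adapter(Point, adapt_point)   # goes through pysqlite_microprotocols_add()

    con = sqlite3.connect(':memory:')
    cur = con.execute('SELECT ?', (Point(4.0, -3.2),))
    print(cur.fetchone()[0])                       # '4.000000;-3.200000'
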
@@ -69,11 +69,13 @@ int pysqlite_statement_create(pysqlite_Statement* self, pysqlite_Connection* con
Py_INCREF(sql);
self->sql = sql;

+Py_BEGIN_ALLOW_THREADS
rc = sqlite3_prepare(connection->db,
sql_cstr,
-1,
&self->st,
&tail);
+Py_END_ALLOW_THREADS

self->db = connection->db;

@@ -219,7 +221,7 @@ void pysqlite_statement_bind_parameters(pysqlite_Statement* self, PyObject* para
if (!_need_adapt(current_param)) {
adapted = current_param;
} else {
-adapted = microprotocols_adapt(current_param, (PyObject*)&pysqlite_PrepareProtocolType, NULL);
+adapted = pysqlite_microprotocols_adapt(current_param, (PyObject*)&pysqlite_PrepareProtocolType, NULL);
if (adapted) {
Py_DECREF(current_param);
} else {

@@ -264,7 +266,7 @@ void pysqlite_statement_bind_parameters(pysqlite_Statement* self, PyObject* para
if (!_need_adapt(current_param)) {
adapted = current_param;
} else {
-adapted = microprotocols_adapt(current_param, (PyObject*)&pysqlite_PrepareProtocolType, NULL);
+adapted = pysqlite_microprotocols_adapt(current_param, (PyObject*)&pysqlite_PrepareProtocolType, NULL);
if (adapted) {
Py_DECREF(current_param);
} else {

@@ -302,11 +304,13 @@ int pysqlite_statement_recompile(pysqlite_Statement* self, PyObject* params)
return rc;
}

+Py_BEGIN_ALLOW_THREADS
rc = sqlite3_prepare(self->db,
sql_cstr,
-1,
&new_st,
&tail);
+Py_END_ALLOW_THREADS

if (rc == SQLITE_OK) {
/* The efficient sqlite3_transfer_bindings is only available in SQLite

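Releasing the GIL around sqlite3_prepare (issue #3846 in the log above) matters for workloads like the following hedged sketch, where several threads each hold their own connection to the same database file; the file name and loop counts are illustrative only:

    import sqlite3
    import threading

    def worker(path):
        con = sqlite3.connect(path)                  # one connection per thread
        for _ in range(1000):
            con.execute('SELECT 1').fetchone()       # statement preparation no longer holds the GIL
        con.close()

    threads = [threading.Thread(target=worker, args=('shared.db',)) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
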
@@ -24,7 +24,7 @@
#include "module.h"
#include "connection.h"

-int _sqlite_step_with_busyhandler(sqlite3_stmt* statement, pysqlite_Connection* connection)
+int pysqlite_step(sqlite3_stmt* statement, pysqlite_Connection* connection)
{
int rc;

@@ -28,7 +28,7 @@
#include "sqlite3.h"
#include "connection.h"

-int _sqlite_step_with_busyhandler(sqlite3_stmt* statement, pysqlite_Connection* connection);
+int pysqlite_step(sqlite3_stmt* statement, pysqlite_Connection* connection);

/**
* Checks the SQLite error code and sets the appropriate DB-API exception.