Whitespace normalization.

Tim Peters 2001-01-15 00:50:52 +00:00
parent fa25a7d51f
commit 2344fae6d0
17 changed files with 3022 additions and 3024 deletions

View File

@ -1,6 +1,6 @@
"""An object-oriented interface to .netrc files."""
# Module and documentation by Eric S. Raymond, 21 Dec 1998
import os, shlex
@ -12,7 +12,7 @@ class netrc:
self.hosts = {}
self.macros = {}
lexer = shlex.shlex(fp)
# Allows @ in hostnames. Not a big deal...
lexer.wordchars = lexer.wordchars + '.-@'
while 1:
# Look for a machine, default, or macdef top-level keyword
@ -23,7 +23,7 @@ class netrc:
entryname = lexer.get_token()
elif tt == 'default':
entryname = 'default'
elif tt == 'macdef': # Just skip to end of macdefs
entryname = lexer.get_token()
self.macros[entryname] = []
lexer.whitespace = ' \t'
@ -36,7 +36,7 @@ class netrc:
self.macros[entryname].append(line)
else:
raise SyntaxError, "bad toplevel token %s, file %s, line %d" \
% (tt, file, lexer.lineno)
# We're looking at start of an entry for a named machine or default.
if toplevel == 'machine':
@ -87,6 +87,5 @@ class netrc:
rep = rep + "\n"
return rep
if __name__ == '__main__':
print netrc()
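For orientation, here is a minimal usage sketch of the netrc class whose parser appears in this hunk. It assumes only what the diff shows: a no-argument constructor (as in the print netrc() demo above), a hosts dictionary filled during parsing, and a __repr__ that dumps the parsed data.

# Hedged sketch: parse the default ~/.netrc and inspect what was found.
from netrc import netrc

rc = netrc()              # no-argument form, as used in the __main__ demo above
print rc                  # __repr__ re-emits the parsed entries in .netrc syntax
print rc.hosts.keys()     # machine names collected while parsing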

View File

@ -34,36 +34,36 @@ import socket
import string
# Exceptions raised when an error or invalid response is received
class NNTPError(Exception):
"""Base class for all nntplib exceptions"""
def __init__(self, *args):
apply(Exception.__init__, (self,)+args)
try:
self.response = args[0]
except IndexError:
self.response = 'No response given'
class NNTPReplyError(NNTPError):
"""Unexpected [123]xx reply"""
pass
class NNTPTemporaryError(NNTPError):
"""4xx errors"""
pass
class NNTPPermanentError(NNTPError):
"""5xx errors"""
pass
class NNTPProtocolError(NNTPError):
"""Response does not begin with [1-5]"""
pass
class NNTPDataError(NNTPError):
"""Error in response data"""
pass
# for backwards compatibility
error_reply = NNTPReplyError
@ -73,7 +73,7 @@ error_proto = NNTPProtocolError
error_data = NNTPDataError
# Standard port used by NNTP servers
NNTP_PORT = 119
@ -86,450 +86,450 @@ LONGRESP = ['100', '215', '220', '221', '222', '224', '230', '231', '282']
CRLF = '\r\n'
# The class itself
class NNTP:
def __init__(self, host, port=NNTP_PORT, user=None, password=None,
readermode=None):
"""Initialize an instance. Arguments:
- host: hostname to connect to
- port: port to connect to (default the standard NNTP port)
- user: username to authenticate with
- password: password to use with username
- readermode: if true, send 'mode reader' command after
connecting.
readermode is sometimes necessary if you are connecting to an
NNTP server on the local machine and intend to call
reader-specific commands, such as `group'. If you get
unexpected NNTPPermanentErrors, you might need to set
readermode.
"""
self.host = host
self.port = port
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((self.host, self.port))
self.file = self.sock.makefile('rb')
self.debugging = 0
self.welcome = self.getresp()
if readermode:
try:
self.welcome = self.shortcmd('mode reader')
except NNTPPermanentError:
# error 500, probably 'not implemented'
pass
if user:
resp = self.shortcmd('authinfo user '+user)
if resp[:3] == '381':
if not password:
raise NNTPReplyError(resp)
else:
resp = self.shortcmd(
'authinfo pass '+password)
if resp[:3] != '281':
raise NNTPPermanentError(resp)
# Get the welcome message from the server
# (this is read and squirreled away by __init__()).
# If the response code is 200, posting is allowed;
# if it is 201, posting is not allowed
def getwelcome(self):
"""Get the welcome message from the server
(this is read and squirreled away by __init__()).
If the response code is 200, posting is allowed;
if it is 201, posting is not allowed."""
if self.debugging: print '*welcome*', `self.welcome`
return self.welcome
def set_debuglevel(self, level):
"""Set the debugging level. Argument 'level' means:
0: no debugging output (default)
1: print commands and responses but not body text etc.
2: also print raw lines read and sent before stripping CR/LF"""
self.debugging = level
debug = set_debuglevel
def putline(self, line):
"""Internal: send one line to the server, appending CRLF."""
line = line + CRLF
if self.debugging > 1: print '*put*', `line`
self.sock.send(line)
def putcmd(self, line):
"""Internal: send one command to the server (through putline())."""
if self.debugging: print '*cmd*', `line`
self.putline(line)
def getline(self):
"""Internal: return one line from the server, stripping CRLF.
Raise EOFError if the connection is closed."""
line = self.file.readline()
if self.debugging > 1:
print '*get*', `line`
if not line: raise EOFError
if line[-2:] == CRLF: line = line[:-2]
elif line[-1:] in CRLF: line = line[:-1]
return line
def getresp(self):
"""Internal: get a response from the server.
Raise various errors if the response indicates an error."""
resp = self.getline()
if self.debugging: print '*resp*', `resp`
c = resp[:1]
if c == '4':
raise NNTPTemporaryError(resp)
if c == '5':
raise NNTPPermanentError(resp)
if c not in '123':
raise NNTPProtocolError(resp)
return resp
def getlongresp(self):
"""Internal: get a response plus following text from the server.
Raise various errors if the response indicates an error."""
resp = self.getresp()
if resp[:3] not in LONGRESP:
raise NNTPReplyError(resp)
list = []
while 1:
line = self.getline()
if line == '.':
break
if line[:2] == '..':
line = line[1:]
list.append(line)
return resp, list
def shortcmd(self, line):
"""Internal: send a command and get the response."""
self.putcmd(line)
return self.getresp()
def longcmd(self, line):
"""Internal: send a command and get the response plus following text."""
self.putcmd(line)
return self.getlongresp()
def newgroups(self, date, time):
"""Process a NEWGROUPS command. Arguments:
- date: string 'yymmdd' indicating the date
- time: string 'hhmmss' indicating the time
Return:
- resp: server response if successful
- list: list of newsgroup names"""
return self.longcmd('NEWGROUPS ' + date + ' ' + time)
def newnews(self, group, date, time):
"""Process a NEWNEWS command. Arguments:
- group: group name or '*'
- date: string 'yymmdd' indicating the date
- time: string 'hhmmss' indicating the time
Return:
- resp: server response if successful
- list: list of article ids"""
cmd = 'NEWNEWS ' + group + ' ' + date + ' ' + time
return self.longcmd(cmd)
def list(self):
"""Process a LIST command. Return:
- resp: server response if successful
- list: list of (group, last, first, flag) (strings)"""
resp, list = self.longcmd('LIST')
for i in range(len(list)):
# Parse lines into "group last first flag"
list[i] = tuple(string.split(list[i]))
return resp, list
def group(self, name):
"""Process a GROUP command. Argument:
- group: the group name
Returns:
- resp: server response if successful
- count: number of articles (string)
- first: first article number (string)
- last: last article number (string)
- name: the group name"""
resp = self.shortcmd('GROUP ' + name)
if resp[:3] != '211':
raise NNTPReplyError(resp)
words = string.split(resp)
count = first = last = 0
n = len(words)
if n > 1:
count = words[1]
if n > 2:
first = words[2]
if n > 3:
last = words[3]
if n > 4:
name = string.lower(words[4])
return resp, count, first, last, name
def help(self):
"""Process a HELP command. Returns:
- resp: server response if successful
- list: list of strings"""
return self.longcmd('HELP')
def statparse(self, resp):
"""Internal: parse the response of a STAT, NEXT or LAST command."""
if resp[:2] != '22':
raise NNTPReplyError(resp)
words = string.split(resp)
nr = 0
id = ''
n = len(words)
if n > 1:
nr = words[1]
if n > 2:
id = words[2]
return resp, nr, id
def statcmd(self, line):
"""Internal: process a STAT, NEXT or LAST command."""
resp = self.shortcmd(line)
return self.statparse(resp)
def stat(self, id):
"""Process a STAT command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: the article number
- id: the article id"""
return self.statcmd('STAT ' + id)
def next(self):
"""Process a NEXT command. No arguments. Return as for STAT."""
return self.statcmd('NEXT')
def last(self):
"""Process a LAST command. No arguments. Return as for STAT."""
return self.statcmd('LAST')
def artcmd(self, line):
"""Internal: process a HEAD, BODY or ARTICLE command."""
resp, list = self.longcmd(line)
resp, nr, id = self.statparse(resp)
return resp, nr, id, list
def head(self, id):
"""Process a HEAD command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article's header"""
return self.artcmd('HEAD ' + id)
def body(self, id):
"""Process a BODY command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article's body"""
return self.artcmd('BODY ' + id)
def article(self, id):
"""Process an ARTICLE command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article"""
return self.artcmd('ARTICLE ' + id)
def slave(self):
"""Process a SLAVE command. Returns:
- resp: server response if successful"""
return self.shortcmd('SLAVE')
def xhdr(self, hdr, str):
"""Process an XHDR command (optional server extension). Arguments:
- hdr: the header type (e.g. 'subject')
- str: an article nr, a message id, or a range nr1-nr2
Returns:
- resp: server response if successful
- list: list of (nr, value) strings"""
pat = re.compile('^([0-9]+) ?(.*)\n?')
resp, lines = self.longcmd('XHDR ' + hdr + ' ' + str)
for i in range(len(lines)):
line = lines[i]
m = pat.match(line)
if m:
lines[i] = m.group(1, 2)
return resp, lines
def xover(self,start,end):
"""Process an XOVER command (optional server extension) Arguments:
- start: start of range
- end: end of range
Returns:
- resp: server response if successful
- list: list of (art-nr, subject, poster, date,
id, references, size, lines)"""
resp, lines = self.longcmd('XOVER ' + start + '-' + end)
xover_lines = []
for line in lines:
elem = string.splitfields(line,"\t")
try:
xover_lines.append((elem[0],
elem[1],
elem[2],
elem[3],
elem[4],
string.split(elem[5]),
elem[6],
elem[7]))
except IndexError:
raise NNTPDataError(line)
return resp,xover_lines
def xgtitle(self, group):
"""Process an XGTITLE command (optional server extension) Arguments:
- group: group name wildcard (i.e. news.*)
Returns:
- resp: server response if successful
- list: list of (name,title) strings"""
line_pat = re.compile("^([^ \t]+)[ \t]+(.*)$")
resp, raw_lines = self.longcmd('XGTITLE ' + group)
lines = []
for raw_line in raw_lines:
match = line_pat.search(string.strip(raw_line))
if match:
lines.append(match.group(1, 2))
return resp, lines
def xpath(self,id):
"""Process an XPATH command (optional server extension) Arguments:
- id: Message id of article
Returns:
resp: server response if successful
path: directory path to article"""
resp = self.shortcmd("XPATH " + id)
if resp[:3] != '223':
raise NNTPReplyError(resp)
try:
[resp_num, path] = string.split(resp)
except ValueError:
raise NNTPReplyError(resp)
else:
return resp, path
def date (self):
"""Process the DATE command. Arguments:
None
Returns:
resp: server response if successful
date: Date suitable for newnews/newgroups commands etc.
time: Time suitable for newnews/newgroups commands etc."""
resp = self.shortcmd("DATE")
if resp[:3] != '111':
raise NNTPReplyError(resp)
elem = string.split(resp)
if len(elem) != 2:
raise NNTPDataError(resp)
date = elem[1][2:8]
time = elem[1][-6:]
if len(date) != 6 or len(time) != 6:
raise NNTPDataError(resp)
return resp, date, time
def post(self, f):
"""Process a POST command. Arguments:
- f: file containing the article
Returns:
- resp: server response if successful"""
resp = self.shortcmd('POST')
# Raises error_??? if posting is not allowed
if resp[0] != '3':
raise NNTPReplyError(resp)
while 1:
line = f.readline()
if not line:
break
if line[-1] == '\n':
line = line[:-1]
if line[:1] == '.':
line = '.' + line
self.putline(line)
self.putline('.')
return self.getresp()
def ihave(self, id, f):
"""Process an IHAVE command. Arguments:
- id: message-id of the article
- f: file containing the article
Returns:
- resp: server response if successful
Note that if the server refuses the article an exception is raised."""
resp = self.shortcmd('IHAVE ' + id)
# Raises error_??? if the server already has it
if resp[0] != '3':
raise NNTPReplyError(resp)
while 1:
line = f.readline()
if not line:
break
if line[-1] == '\n':
line = line[:-1]
if line[:1] == '.':
line = '.' + line
self.putline(line)
self.putline('.')
return self.getresp()
def quit(self):
"""Process a QUIT command and close the socket. Returns:
- resp: server response if successful"""
resp = self.shortcmd('QUIT')
self.file.close()
self.sock.close()
del self.file, self.sock
return resp
def _test():
"""Minimal test function."""
s = NNTP('news', readermode='reader')
resp, count, first, last, name = s.group('comp.lang.python')
print resp
print 'Group', name, 'has', count, 'articles, range', first, 'to', last
resp, subs = s.xhdr('subject', first + '-' + last)
print resp
for item in subs:
print "%7s %s" % item
resp = s.quit()
print resp
# Run the test when run as a script
if __name__ == '__main__':
_test()
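The _test() function above exercises group() and xhdr(); the sketch below, with a placeholder server name, shows the xover() overview tuples documented in the class, wrapped in the error handling the readermode note recommends.

# Hedged sketch: list subjects via XOVER; 'news' is a placeholder hostname.
from nntplib import NNTP, NNTPTemporaryError

s = NNTP('news', readermode=1)
resp, count, first, last, name = s.group('comp.lang.python')
try:
    # each overview entry is (nr, subject, poster, date, id, references, size, lines)
    resp, overviews = s.xover(first, last)
    for nr, subject, poster, date, msgid, refs, size, lines in overviews:
        print nr, subject
except NNTPTemporaryError, err:
    print 'server refused XOVER:', err
s.quit()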

View File

@ -1,5 +1,5 @@
# Module 'ntpath' -- common operations on WinNT/Win95 pathnames
"""Common pathname manipulations, WindowsNT/95 version.
Instead of importing this module directly, import os and refer to this
module as os.path.
@ -254,7 +254,7 @@ def ismount(path):
def walk(top, func, arg):
"""Directory tree walk with callback function.
walk(top, func, arg) calls func(arg, d, files) for each directory d
in the tree rooted at top (including top itself); files is a list
of all the files and subdirs in directory d."""
try:
@ -313,7 +313,7 @@ def expanduser(path):
# XXX With COMMAND.COM you can use any characters in a variable name,
# XXX except '^|<>='.
def expandvars(path):
"""Expand shell variables of form $var and ${var}.
Unknown variables are left unchanged."""
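A small sketch of the expandvars() behaviour described in that docstring; TEMPDRIVE is a hypothetical environment variable used only for illustration.

# Hedged sketch: expand $var references with ntpath.expandvars().
import os, ntpath

os.environ['TEMPDRIVE'] = 'D:'                    # hypothetical variable
print ntpath.expandvars('$TEMPDRIVE\\scratch')    # D:\scratch
print ntpath.expandvars('$UNDEFINED\\scratch')    # unknown vars are left unchanged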

View File

@ -1,66 +1,66 @@
"""Convert a NT pathname to a file URL and vice versa."""
def url2pathname(url):
r"""Convert a URL to a DOS path.
///C|/foo/bar/spam.foo
becomes
C:\foo\bar\spam.foo
"""
import string, urllib
if not '|' in url:
# No drive specifier, just convert slashes
if url[:4] == '////':
# path is something like ////host/path/on/remote/host
# convert this to \\host\path\on\remote\host
# (notice halving of slashes at the start of the path)
url = url[2:]
components = string.split(url, '/')
# make sure not to convert quoted slashes :-)
return urllib.unquote(string.join(components, '\\'))
comp = string.split(url, '|')
if len(comp) != 2 or comp[0][-1] not in string.letters:
error = 'Bad URL: ' + url
raise IOError, error
drive = string.upper(comp[0][-1])
components = string.split(comp[1], '/')
path = drive + ':'
for comp in components:
if comp:
path = path + '\\' + urllib.unquote(comp)
return path
def pathname2url(p):
r"""Convert a DOS path name to a file url.
C:\foo\bar\spam.foo
becomes
///C|/foo/bar/spam.foo
"""
import string, urllib
if not ':' in p:
# No drive specifier, just convert slashes and quote the name
if p[:2] == '\\\\':
# path is something like \\host\path\on\remote\host
# convert this to ////host/path/on/remote/host
# (notice doubling of slashes at the start of the path)
p = '\\\\' + p
components = string.split(p, '\\')
return urllib.quote(string.join(components, '/'))
comp = string.split(p, ':')
if len(comp) != 2 or len(comp[0]) > 1:
error = 'Bad path: ' + p
raise IOError, error
drive = urllib.quote(string.upper(comp[0]))
components = string.split(comp[1], '\\')
path = '///' + drive + '|'
for comp in components:
if comp:
path = path + '/' + urllib.quote(comp)
return path
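The round trip promised by the two docstrings above, as a short sketch:

# Hedged sketch: DOS path -> file URL -> DOS path.
import nturl2path

p = r'C:\foo\bar\spam.foo'
u = nturl2path.pathname2url(p)       # '///C|/foo/bar/spam.foo'
print u
print nturl2path.url2pathname(u)     # back to 'C:\foo\bar\spam.foo'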

View File

@ -213,7 +213,7 @@ def execlpe(file, *args):
Execute the executable file (which is searched for along $PATH)
with argument list args and environment env, replacing the current
process. """
env = args[-1]
execvpe(file, args[:-1], env)
@ -231,7 +231,7 @@ def execvpe(file, args, env):
Execute the executable file (which is searched for along $PATH)
with argument list args and environment env , replacing the
current process.
args may be a list or tuple of strings. """
_execvpe(file, args, env)
_notfound = None
@ -370,7 +370,7 @@ if _exists("fork") and not _exists("spawnv") and _exists("execv"):
Execute file with arguments from args in a subprocess.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
return _spawnvef(mode, file, args, None, execv)
def spawnve(mode, file, args, env):

Lib/pdb.py: 1766 changed lines. File diff suppressed because it is too large.

View File

@ -123,7 +123,7 @@ class Pickler:
return LONG_BINGET + s
return GET + `i` + '\n'
def save(self, object, pers_save = 0):
memo = self.memo
@ -134,7 +134,7 @@ class Pickler:
return
d = id(object)
t = type(object)
if ((t is TupleType) and (len(object) == 0)):
@ -179,14 +179,14 @@ class Pickler:
"tuple" % reduce
l = len(tup)
if ((l != 2) and (l != 3)):
raise PicklingError, "tuple returned by %s must contain " \
"only two or three elements" % reduce
callable = tup[0]
arg_tup = tup[1]
if (l > 2):
state = tup[2]
else:
@ -196,7 +196,7 @@ class Pickler:
raise PicklingError, "Second element of tuple returned " \
"by %s must be a tuple" % reduce
self.save_reduce(callable, arg_tup, state)
memo_len = len(memo)
self.write(self.put(memo_len))
memo[d] = (memo_len, object)
@ -224,7 +224,7 @@ class Pickler:
save(callable)
save(arg_tup)
write(REDUCE)
if (state is not None):
save(state)
write(BUILD)
@ -317,7 +317,7 @@ class Pickler:
if (self.bin):
write(POP_MARK + self.get(memo[d][0]))
return
write(POP * (len(object) + 1) + self.get(memo[d][0]))
return
@ -352,7 +352,7 @@ class Pickler:
for element in object:
save(element)
if (not using_appends):
write(APPEND)
@ -542,7 +542,7 @@ class Unpickler:
def load_binpersid(self):
stack = self.stack
pid = stack[-1]
del stack[-1]
@ -568,7 +568,7 @@ class Unpickler:
def load_binint2(self):
self.append(mloads('i' + self.read(2) + '\000\000'))
dispatch[BININT2] = load_binint2
def load_long(self):
self.append(long(self.readline()[:-1], 0))
dispatch[LONG] = load_long
@ -710,7 +710,7 @@ class Unpickler:
k = self.marker()
klass = stack[k + 1]
del stack[k + 1]
args = tuple(stack[k + 1:])
del stack[k:]
instantiated = 0
if (not args and type(klass) is ClassType and
@ -726,7 +726,7 @@ class Unpickler:
if not instantiated:
value = apply(klass, args)
self.append(value)
dispatch[OBJ] = load_obj
def load_global(self):
module = self.readline()[:-1]
@ -761,8 +761,8 @@ class Unpickler:
safe = None
if (not safe):
raise UnpicklingError, "%s is not safe for " \
"unpickling" % callable
if arg_tup is None:
value = callable.__basicnew__()
@ -829,7 +829,7 @@ class Unpickler:
del stack[mark:]
dispatch[APPENDS] = load_appends
def load_setitem(self):
stack = self.stack
value = stack[-1]

View File

@ -69,229 +69,229 @@ import string
# Conversion step kinds
FILEIN_FILEOUT = 'ff' # Must read & write real files
STDIN_FILEOUT = '-f' # Must write a real file
FILEIN_STDOUT = 'f-' # Must read a real file
STDIN_STDOUT = '--' # Normal pipeline element
SOURCE = '.-' # Must be first, writes stdout
SINK = '-.' # Must be last, reads stdin
stepkinds = [FILEIN_FILEOUT, STDIN_FILEOUT, FILEIN_STDOUT, STDIN_STDOUT, \
SOURCE, SINK]
class Template:
"""Class representing a pipeline template."""
def __init__(self):
"""Template() returns a fresh pipeline template."""
self.debugging = 0
self.reset()
def __repr__(self):
"""t.__repr__() implements `t`."""
return '<Template instance, steps=' + `self.steps` + '>'
def reset(self):
"""t.reset() restores a pipeline template to its initial state."""
self.steps = []
def clone(self):
"""t.clone() returns a new pipeline template with identical
initial state as the current one."""
t = Template()
t.steps = self.steps[:]
t.debugging = self.debugging
return t
def debug(self, flag):
"""t.debug(flag) turns debugging on or off."""
self.debugging = flag
def append(self, cmd, kind):
"""t.append(cmd, kind) adds a new step at the end."""
if type(cmd) is not type(''):
raise TypeError, \
'Template.append: cmd must be a string'
if kind not in stepkinds:
raise ValueError, \
'Template.append: bad kind ' + `kind`
if kind == SOURCE:
raise ValueError, \
'Template.append: SOURCE can only be prepended'
if self.steps and self.steps[-1][1] == SINK:
raise ValueError, \
'Template.append: already ends with SINK'
if kind[0] == 'f' and not re.search('\$IN\b', cmd):
raise ValueError, \
'Template.append: missing $IN in cmd'
if kind[1] == 'f' and not re.search('\$OUT\b', cmd):
raise ValueError, \
'Template.append: missing $OUT in cmd'
self.steps.append((cmd, kind))
def prepend(self, cmd, kind):
"""t.prepend(cmd, kind) adds a new step at the front."""
if type(cmd) is not type(''):
raise TypeError, \
'Template.prepend: cmd must be a string'
if kind not in stepkinds:
raise ValueError, \
'Template.prepend: bad kind ' + `kind`
if kind == SINK:
raise ValueError, \
'Template.prepend: SINK can only be appended'
if self.steps and self.steps[0][1] == SOURCE:
raise ValueError, \
'Template.prepend: already begins with SOURCE'
if kind[0] == 'f' and not re.search('\$IN\b', cmd):
raise ValueError, \
'Template.prepend: missing $IN in cmd'
if kind[1] == 'f' and not re.search('\$OUT\b', cmd):
raise ValueError, \
'Template.prepend: missing $OUT in cmd'
self.steps.insert(0, (cmd, kind))
def open(self, file, rw):
"""t.open(file, rw) returns a pipe or file object open for
reading or writing; the file is the other end of the pipeline."""
if rw == 'r':
return self.open_r(file)
if rw == 'w':
return self.open_w(file)
raise ValueError, \
'Template.open: rw must be \'r\' or \'w\', not ' + `rw`
def open_r(self, file):
"""t.open_r(file) and t.open_w(file) implement
t.open(file, 'r') and t.open(file, 'w') respectively."""
if not self.steps:
return open(file, 'r')
if self.steps[-1][1] == SINK:
raise ValueError, \
'Template.open_r: pipeline ends with SINK'
cmd = self.makepipeline(file, '')
return os.popen(cmd, 'r')
def open_w(self, file):
if not self.steps:
return open(file, 'w')
if self.steps[0][1] == SOURCE:
raise ValueError, \
'Template.open_w: pipeline begins with SOURCE'
cmd = self.makepipeline('', file)
return os.popen(cmd, 'w')
def copy(self, infile, outfile):
return os.system(self.makepipeline(infile, outfile))
def makepipeline(self, infile, outfile):
cmd = makepipeline(infile, self.steps, outfile)
if self.debugging:
print cmd
cmd = 'set -x; ' + cmd
return cmd
def makepipeline(infile, steps, outfile):
# Build a list with for each command:
# [input filename or '', command string, kind, output filename or '']
list = []
for cmd, kind in steps:
list.append(['', cmd, kind, ''])
#
# Make sure there is at least one step
#
if not list:
list.append(['', 'cat', '--', ''])
#
# Take care of the input and output ends
#
[cmd, kind] = list[0][1:3]
if kind[0] == 'f' and not infile:
list.insert(0, ['', 'cat', '--', ''])
list[0][0] = infile
#
[cmd, kind] = list[-1][1:3]
if kind[1] == 'f' and not outfile:
list.append(['', 'cat', '--', ''])
list[-1][-1] = outfile
#
# Invent temporary files to connect stages that need files
#
garbage = []
for i in range(1, len(list)):
lkind = list[i-1][2]
rkind = list[i][2]
if lkind[1] == 'f' or rkind[0] == 'f':
temp = tempfile.mktemp()
garbage.append(temp)
list[i-1][-1] = list[i][0] = temp
#
for item in list:
[inf, cmd, kind, outf] = item
if kind[1] == 'f':
cmd = 'OUT=' + quote(outf) + '; ' + cmd
if kind[0] == 'f':
cmd = 'IN=' + quote(inf) + '; ' + cmd
if kind[0] == '-' and inf:
cmd = cmd + ' <' + quote(inf)
if kind[1] == '-' and outf:
cmd = cmd + ' >' + quote(outf)
item[1] = cmd
#
cmdlist = list[0][1]
for item in list[1:]:
[cmd, kind] = item[1:3]
if item[0] == '':
if 'f' in kind:
cmd = '{ ' + cmd + '; }'
cmdlist = cmdlist + ' |\n' + cmd
else:
cmdlist = cmdlist + '\n' + cmd
#
if garbage:
rmcmd = 'rm -f'
for file in garbage:
rmcmd = rmcmd + ' ' + quote(file)
trapcmd = 'trap ' + quote(rmcmd + '; exit') + ' 1 2 3 13 14 15'
cmdlist = trapcmd + '\n' + cmdlist + '\n' + rmcmd
#
return cmdlist
# Reliably quote a string as a single argument for /bin/sh
_safechars = string.letters + string.digits + '!@%_-+=:,./' # Safe unquoted
_funnychars = '"`$\\' # Unsafe inside "double quotes"
def quote(file):
for c in file:
if c not in _safechars:
break
else:
return file
if '\'' not in file:
return '\'' + file + '\''
res = ''
for c in file:
if c in _funnychars:
c = '\\' + c
res = res + c
return '"' + res + '"'
# Small test program and example
def test():
print 'Testing...'
t = Template()
t.append('togif $IN $OUT', 'ff')
t.append('giftoppm', '--')
t.append('ppmtogif >$OUT', '-f')
t.append('fromgif $IN $OUT', 'ff')
t.debug(1)
FILE = '/usr/local/images/rgb/rogues/guido.rgb'
t.copy(FILE, '@temp')
print 'Done.'
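The test() above relies on local togif/fromgif helpers; the sketch below builds the same kind of Template with a stock POSIX filter instead, writing through the pipeline as described for open_w().

# Hedged sketch: uppercase text on its way into a file via a one-step pipeline.
import pipes

t = pipes.Template()
t.append('tr a-z A-Z', '--')          # '--' is STDIN_STDOUT, a plain filter
f = t.open('/tmp/pipes_demo.txt', 'w')
f.write('hello, pipeline\n')
f.close()                             # waits for the shell pipeline to finish
print open('/tmp/pipes_demo.txt').read()   # HELLO, PIPELINE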

View File

@ -32,290 +32,290 @@ CRLF = CR+LF
class POP3:
"""This class supports both the minimal and optional command sets.
Arguments can be strings or integers (where appropriate)
(e.g.: retr(1) and retr('1') both work equally well).
Minimal Command Set:
USER name user(name)
PASS string pass_(string)
STAT stat()
LIST [msg] list(msg = None)
RETR msg retr(msg)
DELE msg dele(msg)
NOOP noop()
RSET rset()
QUIT quit()
Optional Commands (some servers support these):
RPOP name rpop(name)
APOP name digest apop(name, digest)
TOP msg n top(msg, n)
UIDL [msg] uidl(msg = None)
Raises one exception: 'error_proto'.
Instantiate with:
POP3(hostname, port=110)
NB: the POP protocol locks the mailbox from user
authorization until QUIT, so be sure to get in, suck
the messages, and quit, each time you access the
mailbox.
POP is a line-based protocol, which means large mail
messages consume lots of python cycles reading them
line-by-line.
If it's available on your mail server, use IMAP4
instead, it doesn't suffer from the two problems
above.
"""
def __init__(self, host, port = POP3_PORT):
self.host = host
self.port = port
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((self.host, self.port))
self.file = self.sock.makefile('rb')
self._debugging = 0
self.welcome = self._getresp()
def _putline(self, line):
#if self._debugging > 1: print '*put*', `line`
self.sock.send('%s%s' % (line, CRLF))
# Internal: send one command to the server (through _putline())
def _putcmd(self, line):
#if self._debugging: print '*cmd*', `line`
self._putline(line)
# Internal: return one line from the server, stripping CRLF.
# This is where all the CPU time of this module is consumed.
# Raise error_proto('-ERR EOF') if the connection is closed.
def _getline(self):
line = self.file.readline()
#if self._debugging > 1: print '*get*', `line`
if not line: raise error_proto('-ERR EOF')
octets = len(line)
# server can send any combination of CR & LF
# however, 'readline()' returns lines ending in LF
# so only possibilities are ...LF, ...CRLF, CR...LF
if line[-2:] == CRLF:
return line[:-2], octets
if line[0] == CR:
return line[1:-1], octets
return line[:-1], octets
# Internal: get a response from the server.
# Raise 'error_proto' if the response doesn't start with '+'.
def _getresp(self):
resp, o = self._getline()
#if self._debugging > 1: print '*resp*', `resp`
c = resp[:1]
if c != '+':
raise error_proto(resp)
return resp
# Internal: get a response plus following text from the server.
def _getlongresp(self):
resp = self._getresp()
list = []; octets = 0
line, o = self._getline()
while line != '.':
if line[:2] == '..':
o = o-1
line = line[1:]
octets = octets + o
list.append(line)
line, o = self._getline()
return resp, list, octets
# Internal: send a command and get the response
def _shortcmd(self, line):
self._putcmd(line)
return self._getresp()
# Internal: send a command and get the response plus following text
def _longcmd(self, line):
self._putcmd(line)
return self._getlongresp()
# These can be useful:
def getwelcome(self):
return self.welcome
def set_debuglevel(self, level):
self._debugging = level
# Here are all the POP commands:
def user(self, user):
"""Send user name, return response
(should indicate password required).
"""
return self._shortcmd('USER %s' % user)
def pass_(self, pswd):
"""Send password, return response
(response includes message count, mailbox size).
NB: mailbox is locked by server from here to 'quit()'
"""
return self._shortcmd('PASS %s' % pswd)
def stat(self):
"""Get mailbox status.
Result is tuple of 2 ints (message count, mailbox size)
"""
retval = self._shortcmd('STAT')
rets = string.split(retval)
#if self._debugging: print '*stat*', `rets`
numMessages = string.atoi(rets[1])
sizeMessages = string.atoi(rets[2])
return (numMessages, sizeMessages)
def list(self, which=None):
"""Request listing, return result.
Result without a message number argument is in form
['response', ['mesg_num octets', ...]].
Result when a message number argument is given is a
single response: the "scan listing" for that message.
"""
if which:
return self._shortcmd('LIST %s' % which)
return self._longcmd('LIST')
def retr(self, which):
"""Retrieve whole message number 'which'.
Result is in form ['response', ['line', ...], octets].
"""
return self._longcmd('RETR %s' % which)
def dele(self, which):
"""Delete message number 'which'.
Result is 'response'.
"""
return self._shortcmd('DELE %s' % which)
def noop(self):
"""Does nothing.
One supposes the response indicates the server is alive.
"""
return self._shortcmd('NOOP')
def rset(self):
"""Not sure what this does."""
return self._shortcmd('RSET')
def quit(self):
"""Signoff: commit changes on server, unlock mailbox, close connection."""
try:
resp = self._shortcmd('QUIT')
except error_proto, val:
resp = val
self.file.close()
self.sock.close()
del self.file, self.sock
return resp
#__del__ = quit
# optional commands:
def rpop(self, user):
"""Not sure what this does."""
return self._shortcmd('RPOP %s' % user)
timestamp = regex.compile('\+OK.*\(<[^>]+>\)')
def apop(self, user, secret):
"""Authorisation
- only possible if server has supplied a timestamp in initial greeting.
Args:
user - mailbox user;
secret - secret shared between client and server.
NB: mailbox is locked by server from here to 'quit()'
"""
if self.timestamp.match(self.welcome) <= 0:
raise error_proto('-ERR APOP not supported by server')
import md5
digest = md5.new(self.timestamp.group(1)+secret).digest()
digest = string.join(map(lambda x:'%02x'%ord(x), digest), '')
return self._shortcmd('APOP %s %s' % (user, digest))
def top(self, which, howmuch):
"""Retrieve message header of message number 'which'
and first 'howmuch' lines of message body.
Result is in form ['response', ['line', ...], octets].
"""
return self._longcmd('TOP %s %s' % (which, howmuch))
def uidl(self, which=None):
"""Return message digest (unique id) list.
If 'which', result contains unique id for that message
in the form 'response mesgnum uid', otherwise result is
the list ['response', ['mesgnum uid', ...], octets]
"""
if which:
return self._shortcmd('UIDL %s' % which)
return self._longcmd('UIDL')
if __name__ == "__main__":
a = POP3(TESTSERVER)
print a.getwelcome()
a.user(TESTACCOUNT)
a.pass_(TESTPASSWORD)
a.list()
(numMsgs, totalSize) = a.stat()
for i in range(1, numMsgs + 1):
(header, msg, octets) = a.retr(i)
print "Message ", `i`, ':'
for line in msg:
print ' ' + line
print '-----------------------'
a.quit()

View File

@ -129,7 +129,7 @@ class _posixfile_:
l_flags = fcntl.fcntl(file.fileno(), FCNTL.F_SETFL, l_flags)
if 'c' in which:
if 'c' in which:
arg = ('!' not in which) # 0 is don't, 1 is do close on exec
l_flags = fcntl.fcntl(file.fileno(), FCNTL.F_SETFD, arg)
@ -142,7 +142,7 @@ class _posixfile_:
if FCNTL.O_NDELAY & l_flags: which = which + 'n'
if FCNTL.O_SYNC & l_flags: which = which + 's'
return which
def lock(self, how, *args):
import struct, fcntl, FCNTL
@ -176,7 +176,7 @@ class _posixfile_:
'freebsd2', 'freebsd3', 'freebsd4', 'freebsd5',
'bsdos2', 'bsdos3', 'bsdos4'):
flock = struct.pack('lxxxxlxxxxlhh', \
l_start, l_len, os.getpid(), l_type, l_whence)
l_start, l_len, os.getpid(), l_type, l_whence)
elif sys.platform in ['aix3', 'aix4']:
flock = struct.pack('hhlllii', \
l_type, l_whence, l_start, l_len, 0, 0, 0)

View File

@ -55,7 +55,7 @@ def join(a, *p):
# Trailing '/'es are stripped from head unless it is the root.
def split(p):
"""Split a pathname. Returns tuple "(head, tail)" where "tail" is
"""Split a pathname. Returns tuple "(head, tail)" where "tail" is
everything after the final slash. Either part may be empty."""
i = p.rfind('/') + 1
head, tail = p[:i], p[i:]
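A couple of worked examples of the head/tail rule described in the docstring (illustration only, not part of the module):

import posixpath

print posixpath.split('/usr/local/bin/python')   # ('/usr/local/bin', 'python')
print posixpath.split('/usr/local/bin/')         # trailing slash stripped from head: ('/usr/local/bin', '')
print posixpath.split('python')                  # no slash at all: ('', 'python')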
@ -93,7 +93,7 @@ def splitext(p):
# path. Useful on DOS/Windows/NT; on Unix, the drive is always empty.
def splitdrive(p):
"""Split a pathname into drive and path. On Posix, drive is always
"""Split a pathname into drive and path. On Posix, drive is always
empty."""
return '', p
@ -220,7 +220,7 @@ def sameopenfile(fp1, fp2):
def samestat(s1, s2):
"""Test whether two stat buffers reference the same file"""
return s1[stat.ST_INO] == s2[stat.ST_INO] and \
s1[stat.ST_DEV] == s2[stat.ST_DEV]
s1[stat.ST_DEV] == s2[stat.ST_DEV]
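As a small illustration, samestat() compares the inode and device fields of two stat results, so two stats of the same path (or of two hard links to one file) compare equal; the path below is a placeholder:

import os, posixpath

st1 = os.stat('/etc/passwd')          # placeholder path
st2 = os.stat('/etc/passwd')
print posixpath.samestat(st1, st2)    # 1: same inode on the same device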
# Is a path a mount point?
@ -253,7 +253,7 @@ def ismount(path):
# or to impose a different order of visiting.
def walk(top, func, arg):
"""walk(top,func,arg) calls func(arg, d, files) for each directory "d"
"""walk(top,func,arg) calls func(arg, d, files) for each directory "d"
in the tree rooted at "top" (including "top" itself). "files" is a list
of all the files and subdirs in directory "d".
"""
@ -263,10 +263,10 @@ def walk(top, func, arg):
return
func(arg, top, names)
for name in names:
name = join(top, name)
st = os.lstat(name)
if stat.S_ISDIR(st[stat.ST_MODE]):
walk(name, func, arg)
name = join(top, name)
st = os.lstat(name)
if stat.S_ISDIR(st[stat.ST_MODE]):
walk(name, func, arg)
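A brief sketch of walk() in use: the visitor function receives the caller-supplied argument, the directory name, and the list of names in that directory (which it may edit in place to prune the walk). The starting directory is a placeholder:

import posixpath

def visit(counts, dirname, names):
    # one call per directory in the tree rooted at the starting point
    counts[0] = counts[0] + len(names)
    print dirname, 'contains', len(names), 'entries'

counts = [0]
posixpath.walk('/tmp', visit, counts)     # placeholder starting directory
print 'total entries seen:', counts[0]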
# Expand paths beginning with '~' or '~user'.
@ -279,7 +279,7 @@ def walk(top, func, arg):
# variable expansion.)
def expanduser(path):
"""Expand ~ and ~user constructions. If user or $HOME is unknown,
"""Expand ~ and ~user constructions. If user or $HOME is unknown,
do nothing."""
if path[:1] != '~':
return path
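Worked examples of expanduser(); the exact results depend on $HOME and the password database, and the user name below is a placeholder:

import posixpath

print posixpath.expanduser('~/notes.txt')        # $HOME + '/notes.txt'
print posixpath.expanduser('~nobody/tmp')        # home directory looked up in the pwd database
print posixpath.expanduser('/already/absolute')  # unchanged: no leading '~'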
@ -349,7 +349,7 @@ def normpath(path):
for comp in comps:
if comp in ('', '.'):
continue
if (comp != '..' or (not initial_slash and not new_comps) or
if (comp != '..' or (not initial_slash and not new_comps) or
(new_comps and new_comps[-1] == '..')):
new_comps.append(comp)
elif new_comps:

View File

@ -44,7 +44,7 @@ below. If the ordinary character is not on the list, then the
resulting RE will match the second character.
\\number Matches the contents of the group of the same number.
\\A Matches only at the start of the string.
\\Z Matches only at the end of the string.
\\Z Matches only at the end of the string.
\\b Matches the empty string, but only at the start or end of a word.
\\B Matches the empty string, but not at the start or end of a word.
\\d Matches any decimal digit; equivalent to the set [0-9].
@ -55,7 +55,7 @@ resulting RE will match the second character.
With LOCALE, it will match the set [0-9_] plus characters defined
as letters for the current locale.
\\W Matches the complement of \\w.
\\\\ Matches a literal backslash.
\\\\ Matches a literal backslash.
This module exports the following functions:
match Match a regular expression pattern to the beginning of a string.
@ -100,8 +100,8 @@ from pcre import *
I = IGNORECASE
L = LOCALE
M = MULTILINE
S = DOTALL
X = VERBOSE
S = DOTALL
X = VERBOSE
#
@ -125,7 +125,7 @@ def _cachecompile(pattern, flags=0):
def match(pattern, string, flags=0):
"""match (pattern, string[, flags]) -> MatchObject or None
If zero or more characters at the beginning of string match the
regular expression pattern, return a corresponding MatchObject
instance. Return None if the string does not match the pattern;
@ -135,12 +135,12 @@ def match(pattern, string, flags=0):
search() instead.
"""
return _cachecompile(pattern, flags).match(string)
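A short illustration of the anchoring difference between match() and search():

import re

print re.match('world', 'hello world')           # None: match() only succeeds at the start
print re.search('world', 'hello world').span()   # (6, 11): search() scans the whole string
print re.match('hello', 'hello world').group()   # 'hello'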
def search(pattern, string, flags=0):
"""search (pattern, string[, flags]) -> MatchObject or None
Scan through string looking for a location where the regular
expression pattern produces a match, and return a corresponding
MatchObject instance. Return None if no position in the string
@ -149,10 +149,10 @@ def search(pattern, string, flags=0):
"""
return _cachecompile(pattern, flags).search(string)
def sub(pattern, repl, string, count=0):
"""sub(pattern, repl, string[, count=0]) -> string
Return the string obtained by replacing the leftmost
non-overlapping occurrences of pattern in string by the
replacement repl. If the pattern isn't found, string is returned
@ -177,7 +177,7 @@ def sub(pattern, repl, string, count=0):
def subn(pattern, repl, string, count=0):
"""subn(pattern, repl, string[, count=0]) -> (string, num substitutions)
Perform the same operation as sub(), but return a tuple
(new_string, number_of_subs_made).
@ -185,10 +185,10 @@ def subn(pattern, repl, string, count=0):
if type(pattern) == type(''):
pattern = _cachecompile(pattern)
return pattern.subn(repl, string, count)
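Worked examples of sub() and subn() on the same pattern and string:

import re

print re.sub('[0-9]+', '#', 'room 101, floor 3')    # 'room #, floor #'
print re.subn('[0-9]+', '#', 'room 101, floor 3')   # ('room #, floor #', 2)
print re.sub('[0-9]+', '#', 'no digits here')       # pattern not found: string returned unchanged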
def split(pattern, string, maxsplit=0):
"""split(pattern, string[, maxsplit=0]) -> list of strings
Split string by the occurrences of pattern. If capturing
parentheses are used in pattern, then the text of all groups in
the pattern are also returned as part of the resulting list. If
@ -203,7 +203,7 @@ def split(pattern, string, maxsplit=0):
def findall(pattern, string):
"""findall(pattern, string) -> list
Return a list of all non-overlapping matches of pattern in
string. If one or more groups are present in the pattern, return a
list of groups; this will be a list of tuples if the pattern has
@ -216,7 +216,7 @@ def findall(pattern, string):
def escape(pattern):
"""escape(string) -> string
Return string with all non-alphanumerics backslashed; this is
useful if you want to match an arbitrary literal string that may
have regular expression metacharacters in it.
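A small example of escape() protecting metacharacters before a literal string is embedded in a pattern:

import re

literal = 'value (x+1)*2'
print re.escape(literal)                                                      # every non-alphanumeric backslashed
print re.search(re.escape(literal), 'got value (x+1)*2 today') is not None   # 1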
@ -242,7 +242,7 @@ def compile(pattern, flags=0):
groupindex={}
code=pcre_compile(pattern, flags, groupindex)
return RegexObject(pattern, flags, code, groupindex)
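A sketch of compiling a pattern once and reusing the resulting object, which supports the same match/search/sub/split/findall methods described for RegexObject below:

import re

word = re.compile('[a-zA-Z]+')                    # compiled once, reused below
print word.findall('3 green bottles')             # ['green', 'bottles']
print word.match('   leading spaces')             # None: match() is anchored at the start
print word.search('   leading spaces').group()    # 'leading'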
#
# Class definitions
@ -258,18 +258,18 @@ class RegexObject:
subn Same as sub, but also return the number of substitutions made.
split Split a string by the occurrences of the pattern.
findall Find all occurrences of the pattern in a string.
"""
def __init__(self, pattern, flags, code, groupindex):
self.code = code
self.code = code
self.flags = flags
self.pattern = pattern
self.groupindex = groupindex
def search(self, string, pos=0, endpos=None):
"""search(string[, pos][, endpos]) -> MatchObject or None
Scan through string looking for a location where this regular
expression produces a match, and return a corresponding
MatchObject instance. Return None if no position in the string
@ -277,24 +277,24 @@ class RegexObject:
a zero-length match at some point in the string. The optional
pos and endpos parameters have the same meaning as for the
match() method.
"""
if endpos is None or endpos>len(string):
if endpos is None or endpos>len(string):
endpos=len(string)
if endpos<pos: endpos=pos
regs = self.code.match(string, pos, endpos, 0)
if regs is None:
return None
self._num_regs=len(regs)
return MatchObject(self,
string,
pos, endpos,
regs)
def match(self, string, pos=0, endpos=None):
"""match(string[, pos][, endpos]) -> MatchObject or None
If zero or more characters at the beginning of string match
this regular expression, return a corresponding MatchObject
instance. Return None if the string does not match the
@ -316,7 +316,7 @@ class RegexObject:
searched for a match.
"""
if endpos is None or endpos>len(string):
if endpos is None or endpos>len(string):
endpos=len(string)
if endpos<pos: endpos=pos
regs = self.code.match(string, pos, endpos, ANCHORED)
@ -327,23 +327,23 @@ class RegexObject:
string,
pos, endpos,
regs)
def sub(self, repl, string, count=0):
"""sub(repl, string[, count=0]) -> string
Return the string obtained by replacing the leftmost
non-overlapping occurrences of the compiled pattern in string
by the replacement repl. If the pattern isn't found, string is
returned unchanged.
Identical to the sub() function, using the compiled pattern.
"""
return self.subn(repl, string, count)[0]
def subn(self, repl, source, count=0):
def subn(self, repl, source, count=0):
"""subn(repl, string[, count=0]) -> tuple
Perform the same operation as sub(), but return a tuple
(new_string, number_of_subs_made).
@ -399,17 +399,17 @@ class RegexObject:
n = n + 1
append(source[pos:])
return (string.join(results, ''), n)
def split(self, source, maxsplit=0):
"""split(source[, maxsplit=0]) -> list of strings
Split string by the occurrences of the compiled pattern. If
capturing parentheses are used in the pattern, then the text
of all groups in the pattern are also returned as part of the
resulting list. If maxsplit is nonzero, at most maxsplit
splits occur, and the remainder of the string is returned as
the final element of the list.
"""
if maxsplit < 0:
raise error, "negative split count"
@ -449,7 +449,7 @@ class RegexObject:
def findall(self, source):
"""findall(source) -> list
Return a list of all non-overlapping matches of the compiled
pattern in string. If one or more groups are present in the
pattern, return a list of groups; this will be a list of
@ -487,7 +487,7 @@ class RegexObject:
def __getinitargs__(self):
return (None,None,None,None) # any 4 elements, to work around
# problems with the
# pickle/cPickle modules not yet
# pickle/cPickle modules not yet
# ignoring the __init__ function
def __getstate__(self):
return self.pattern, self.flags, self.groupindex
@ -517,13 +517,13 @@ class MatchObject:
def __init__(self, re, string, pos, endpos, regs):
self.re = re
self.string = string
self.pos = pos
self.pos = pos
self.endpos = endpos
self.regs = regs
def start(self, g = 0):
"""start([group=0]) -> int or None
Return the index of the start of the substring matched by
group; group defaults to zero (meaning the whole matched
substring). Return -1 if group exists but did not contribute
@ -536,10 +536,10 @@ class MatchObject:
except (KeyError, TypeError):
raise IndexError, 'group %s is undefined' % `g`
return self.regs[g][0]
def end(self, g = 0):
"""end([group=0]) -> int or None
Return the indices of the end of the substring matched by
group; group defaults to zero (meaning the whole matched
substring). Return -1 if group exists but did not contribute
@ -552,10 +552,10 @@ class MatchObject:
except (KeyError, TypeError):
raise IndexError, 'group %s is undefined' % `g`
return self.regs[g][1]
def span(self, g = 0):
"""span([group=0]) -> tuple
Return the 2-tuple (m.start(group), m.end(group)). Note that
if group did not contribute to the match, this is (-1,
-1). Group defaults to zero (meaning the whole matched
@ -568,10 +568,10 @@ class MatchObject:
except (KeyError, TypeError):
raise IndexError, 'group %s is undefined' % `g`
return self.regs[g]
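A worked example of start(), end() and span(), including a group that exists but did not participate in the match:

import re

m = re.search('(wor)(xyz)?ld', 'hello world')
print m.span()                 # (6, 11): the whole match
print m.start(1), m.end(1)     # 6 9: group 1 matched 'wor'
print m.span(2)                # (-1, -1): group 2 did not contribute to the match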
def groups(self, default=None):
"""groups([default=None]) -> tuple
Return a tuple containing all the subgroups of the match, from
1 up to however many groups are in the pattern. The default
argument is used for groups that did not participate in the
@ -589,7 +589,7 @@ class MatchObject:
def group(self, *groups):
"""group([group1, group2, ...]) -> string or tuple
Return one or more subgroups of the match. If there is a
single argument, the result is a single string; if there are
multiple arguments, the result is a tuple with one item per
@ -636,7 +636,7 @@ class MatchObject:
def groupdict(self, default=None):
"""groupdict([default=None]) -> dictionary
Return a dictionary containing all the named subgroups of the
match, keyed by the subgroup name. The default argument is
used for groups that did not participate in the match.

View File

@ -11,7 +11,7 @@
# Copyright 1994, by InfoSeek Corporation, all rights reserved.
# Written by James Roskind
#
#
# Permission to use, copy, modify, and distribute this Python software
# and its associated documentation for any purpose (subject to the
# restriction in the following sentence) without fee is hereby granted,
@ -24,7 +24,7 @@
# to remain in Python, compiled Python, or other languages (such as C)
# wherein the modified or derived code is exclusively imported into a
# Python module.
#
#
# INFOSEEK CORPORATION DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS. IN NO EVENT SHALL INFOSEEK CORPORATION BE LIABLE FOR ANY
@ -41,12 +41,12 @@ import time
import marshal
# Sample timer for use with
# Sample timer for use with
#i_count = 0
#def integer_timer():
# global i_count
# i_count = i_count + 1
# return i_count
# global i_count
# i_count = i_count + 1
# return i_count
#itimes = integer_timer # replace with C coded timer returning integers
#**************************************************************************
@ -57,515 +57,515 @@ import marshal
# simplified user interface
def run(statement, *args):
prof = Profile()
try:
prof = prof.run(statement)
except SystemExit:
pass
if args:
prof.dump_stats(args[0])
else:
return prof.print_stats()
prof = Profile()
try:
prof = prof.run(statement)
except SystemExit:
pass
if args:
prof.dump_stats(args[0])
else:
return prof.print_stats()
# print help
def help():
for dirname in sys.path:
fullname = os.path.join(dirname, 'profile.doc')
if os.path.exists(fullname):
sts = os.system('${PAGER-more} '+fullname)
if sts: print '*** Pager exit status:', sts
break
else:
print 'Sorry, can\'t find the help file "profile.doc"',
print 'along the Python search path'
for dirname in sys.path:
fullname = os.path.join(dirname, 'profile.doc')
if os.path.exists(fullname):
sts = os.system('${PAGER-more} '+fullname)
if sts: print '*** Pager exit status:', sts
break
else:
print 'Sorry, can\'t find the help file "profile.doc"',
print 'along the Python search path'
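A minimal sketch of the simplified interface above: run() profiles a statement given as a string and either prints a report or, with a second argument, dumps marshalled stats for later use with pstats. The function and file name below are placeholders:

import profile

def fib(n):
    if n < 2: return n
    return fib(n-1) + fib(n-2)

# 'fib' must be visible in __main__, since run() execs the string there
profile.run('fib(15)')                 # profile the statement and print a report
profile.run('fib(15)', 'fib.prof')     # or dump the raw stats to a file (placeholder name)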
class Profile:
"""Profiler class.
self.cur is always a tuple. Each such tuple corresponds to a stack
frame that is currently active (self.cur[-2]). The following are the
definitions of its members. We use this external "parallel stack" to
avoid contaminating the program that we are profiling. (old profiler
used to write into the frames local dictionary!!) Derived classes
can change the definition of some entries, as long as they leave
[-2:] intact.
"""Profiler class.
[ 0] = Time that needs to be charged to the parent frame's function.
It is used so that a function call will not have to access the
timing data for the parent frame.
[ 1] = Total time spent in this frame's function, excluding time in
subfunctions
[ 2] = Cumulative time spent in this frame's function, including time in
all subfunctions to this frame.
[-3] = Name of the function that corresponds to this frame.
[-2] = Actual frame that we correspond to (used to sync exception handling)
[-1] = Our parent 6-tuple (corresponds to frame.f_back)
self.cur is always a tuple. Each such tuple corresponds to a stack
frame that is currently active (self.cur[-2]). The following are the
definitions of its members. We use this external "parallel stack" to
avoid contaminating the program that we are profiling. (old profiler
used to write into the frames local dictionary!!) Derived classes
can change the definition of some entries, as long as they leave
[-2:] intact.
Timing data for each function is stored as a 5-tuple in the dictionary
self.timings[]. The index is always the name stored in self.cur[4].
The following are the definitions of the members:
[ 0] = Time that needs to be charged to the parent frame's function.
It is used so that a function call will not have to access the
timing data for the parent frame.
[ 1] = Total time spent in this frame's function, excluding time in
subfunctions
[ 2] = Cumulative time spent in this frame's function, including time in
all subfunctions to this frame.
[-3] = Name of the function that corresponds to this frame.
[-2] = Actual frame that we correspond to (used to sync exception handling)
[-1] = Our parent 6-tuple (corresponds to frame.f_back)
[0] = The number of times this function was called, not counting direct
or indirect recursion,
[1] = Number of times this function appears on the stack, minus one
[2] = Total time spent internal to this function
[3] = Cumulative time that this function was present on the stack. In
non-recursive functions, this is the total execution time from start
to finish of each invocation of a function, including time spent in
all subfunctions.
[5] = A dictionary indicating for each function name, the number of times
it was called by us.
"""
Timing data for each function is stored as a 5-tuple in the dictionary
self.timings[]. The index is always the name stored in self.cur[4].
The following are the definitions of the members:
def __init__(self, timer=None):
self.timings = {}
self.cur = None
self.cmd = ""
[0] = The number of times this function was called, not counting direct
or indirect recursion,
[1] = Number of times this function appears on the stack, minus one
[2] = Total time spent internal to this function
[3] = Cumulative time that this function was present on the stack. In
non-recursive functions, this is the total execution time from start
to finish of each invocation of a function, including time spent in
all subfunctions.
[5] = A dictionary indicating for each function name, the number of times
it was called by us.
"""
self.dispatch = { \
'call' : self.trace_dispatch_call, \
'return' : self.trace_dispatch_return, \
'exception': self.trace_dispatch_exception, \
}
def __init__(self, timer=None):
self.timings = {}
self.cur = None
self.cmd = ""
if not timer:
if os.name == 'mac':
import MacOS
self.timer = MacOS.GetTicks
self.dispatcher = self.trace_dispatch_mac
self.get_time = self.get_time_mac
elif hasattr(time, 'clock'):
self.timer = time.clock
self.dispatcher = self.trace_dispatch_i
elif hasattr(os, 'times'):
self.timer = os.times
self.dispatcher = self.trace_dispatch
else:
self.timer = time.time
self.dispatcher = self.trace_dispatch_i
else:
self.timer = timer
t = self.timer() # test out timer function
try:
if len(t) == 2:
self.dispatcher = self.trace_dispatch
else:
self.dispatcher = self.trace_dispatch_l
except TypeError:
self.dispatcher = self.trace_dispatch_i
self.t = self.get_time()
self.simulate_call('profiler')
self.dispatch = { \
'call' : self.trace_dispatch_call, \
'return' : self.trace_dispatch_return, \
'exception': self.trace_dispatch_exception, \
}
if not timer:
if os.name == 'mac':
import MacOS
self.timer = MacOS.GetTicks
self.dispatcher = self.trace_dispatch_mac
self.get_time = self.get_time_mac
elif hasattr(time, 'clock'):
self.timer = time.clock
self.dispatcher = self.trace_dispatch_i
elif hasattr(os, 'times'):
self.timer = os.times
self.dispatcher = self.trace_dispatch
else:
self.timer = time.time
self.dispatcher = self.trace_dispatch_i
else:
self.timer = timer
t = self.timer() # test out timer function
try:
if len(t) == 2:
self.dispatcher = self.trace_dispatch
else:
self.dispatcher = self.trace_dispatch_l
except TypeError:
self.dispatcher = self.trace_dispatch_i
self.t = self.get_time()
self.simulate_call('profiler')
def get_time(self): # slow simulation of method to acquire time
t = self.timer()
if type(t) == type(()) or type(t) == type([]):
t = reduce(lambda x,y: x+y, t, 0)
return t
def get_time_mac(self):
return self.timer()/60.0
def get_time(self): # slow simulation of method to acquire time
t = self.timer()
if type(t) == type(()) or type(t) == type([]):
t = reduce(lambda x,y: x+y, t, 0)
return t
# Heavily optimized dispatch routine for os.times() timer
def get_time_mac(self):
return self.timer()/60.0
def trace_dispatch(self, frame, event, arg):
t = self.timer()
t = t[0] + t[1] - self.t # No Calibration constant
# t = t[0] + t[1] - self.t - .00053 # Calibration constant
# Heavily optimized dispatch routine for os.times() timer
if self.dispatch[event](frame,t):
t = self.timer()
self.t = t[0] + t[1]
else:
r = self.timer()
self.t = r[0] + r[1] - t # put back unrecorded delta
return
def trace_dispatch(self, frame, event, arg):
t = self.timer()
t = t[0] + t[1] - self.t # No Calibration constant
# t = t[0] + t[1] - self.t - .00053 # Calibration constant
if self.dispatch[event](frame,t):
t = self.timer()
self.t = t[0] + t[1]
else:
r = self.timer()
self.t = r[0] + r[1] - t # put back unrecorded delta
return
# Dispatch routine for best timer program (return = scalar integer)
# Dispatch routine for best timer program (return = scalar integer)
def trace_dispatch_i(self, frame, event, arg):
t = self.timer() - self.t # - 1 # Integer calibration constant
if self.dispatch[event](frame,t):
self.t = self.timer()
else:
self.t = self.timer() - t # put back unrecorded delta
return
# Dispatch routine for macintosh (timer returns time in ticks of 1/60th second)
def trace_dispatch_i(self, frame, event, arg):
t = self.timer() - self.t # - 1 # Integer calibration constant
if self.dispatch[event](frame,t):
self.t = self.timer()
else:
self.t = self.timer() - t # put back unrecorded delta
return
def trace_dispatch_mac(self, frame, event, arg):
t = self.timer()/60.0 - self.t # - 1 # Integer calibration constant
if self.dispatch[event](frame,t):
self.t = self.timer()/60.0
else:
self.t = self.timer()/60.0 - t # put back unrecorded delta
return
# Dispatch routine for macintosh (timer returns time in ticks of 1/60th second)
def trace_dispatch_mac(self, frame, event, arg):
t = self.timer()/60.0 - self.t # - 1 # Integer calibration constant
if self.dispatch[event](frame,t):
self.t = self.timer()/60.0
else:
self.t = self.timer()/60.0 - t # put back unrecorded delta
return
# SLOW generic dispatch routine for timer returning lists of numbers
# SLOW generic dispatch routine for timer returning lists of numbers
def trace_dispatch_l(self, frame, event, arg):
t = self.get_time() - self.t
def trace_dispatch_l(self, frame, event, arg):
t = self.get_time() - self.t
if self.dispatch[event](frame,t):
self.t = self.get_time()
else:
self.t = self.get_time()-t # put back unrecorded delta
return
if self.dispatch[event](frame,t):
self.t = self.get_time()
else:
self.t = self.get_time()-t # put back unrecorded delta
return
def trace_dispatch_exception(self, frame, t):
rt, rtt, rct, rfn, rframe, rcur = self.cur
if (not rframe is frame) and rcur:
return self.trace_dispatch_return(rframe, t)
return 0
def trace_dispatch_exception(self, frame, t):
rt, rtt, rct, rfn, rframe, rcur = self.cur
if (not rframe is frame) and rcur:
return self.trace_dispatch_return(rframe, t)
return 0
def trace_dispatch_call(self, frame, t):
fcode = frame.f_code
fn = (fcode.co_filename, fcode.co_firstlineno, fcode.co_name)
self.cur = (t, 0, 0, fn, frame, self.cur)
if self.timings.has_key(fn):
cc, ns, tt, ct, callers = self.timings[fn]
self.timings[fn] = cc, ns + 1, tt, ct, callers
else:
self.timings[fn] = 0, 0, 0, 0, {}
return 1
def trace_dispatch_call(self, frame, t):
fcode = frame.f_code
fn = (fcode.co_filename, fcode.co_firstlineno, fcode.co_name)
self.cur = (t, 0, 0, fn, frame, self.cur)
if self.timings.has_key(fn):
cc, ns, tt, ct, callers = self.timings[fn]
self.timings[fn] = cc, ns + 1, tt, ct, callers
else:
self.timings[fn] = 0, 0, 0, 0, {}
return 1
def trace_dispatch_return(self, frame, t):
# if not frame is self.cur[-2]: raise "Bad return", self.cur[3]
def trace_dispatch_return(self, frame, t):
# if not frame is self.cur[-2]: raise "Bad return", self.cur[3]
# Prefix "r" means part of the Returning or exiting frame
# Prefix "p" means part of the Previous or older frame
# Prefix "r" means part of the Returning or exiting frame
# Prefix "p" means part of the Previous or older frame
rt, rtt, rct, rfn, frame, rcur = self.cur
rtt = rtt + t
sft = rtt + rct
rt, rtt, rct, rfn, frame, rcur = self.cur
rtt = rtt + t
sft = rtt + rct
pt, ptt, pct, pfn, pframe, pcur = rcur
self.cur = pt, ptt+rt, pct+sft, pfn, pframe, pcur
pt, ptt, pct, pfn, pframe, pcur = rcur
self.cur = pt, ptt+rt, pct+sft, pfn, pframe, pcur
cc, ns, tt, ct, callers = self.timings[rfn]
if not ns:
ct = ct + sft
cc = cc + 1
if callers.has_key(pfn):
callers[pfn] = callers[pfn] + 1 # hack: gather more
# stats such as the amount of time added to ct courtesy
# of this specific call, and the contribution to cc
# courtesy of this call.
else:
callers[pfn] = 1
self.timings[rfn] = cc, ns - 1, tt+rtt, ct, callers
cc, ns, tt, ct, callers = self.timings[rfn]
if not ns:
ct = ct + sft
cc = cc + 1
if callers.has_key(pfn):
callers[pfn] = callers[pfn] + 1 # hack: gather more
# stats such as the amount of time added to ct courtesy
# of this specific call, and the contribution to cc
# courtesy of this call.
else:
callers[pfn] = 1
self.timings[rfn] = cc, ns - 1, tt+rtt, ct, callers
return 1
return 1
# The next few functions play with self.cmd. By carefully preloading
# our parallel stack, we can force the profiled result to include
# an arbitrary string as the name of the calling function.
# We use self.cmd as that string, and the resulting stats look
# very nice :-).
# The next few functions play with self.cmd. By carefully preloading
# our parallel stack, we can force the profiled result to include
# an arbitrary string as the name of the calling function.
# We use self.cmd as that string, and the resulting stats look
# very nice :-).
def set_cmd(self, cmd):
if self.cur[-1]: return # already set
self.cmd = cmd
self.simulate_call(cmd)
def set_cmd(self, cmd):
if self.cur[-1]: return # already set
self.cmd = cmd
self.simulate_call(cmd)
class fake_code:
def __init__(self, filename, line, name):
self.co_filename = filename
self.co_line = line
self.co_name = name
self.co_firstlineno = 0
class fake_code:
def __init__(self, filename, line, name):
self.co_filename = filename
self.co_line = line
self.co_name = name
self.co_firstlineno = 0
def __repr__(self):
return repr((self.co_filename, self.co_line, self.co_name))
def __repr__(self):
return repr((self.co_filename, self.co_line, self.co_name))
class fake_frame:
def __init__(self, code, prior):
self.f_code = code
self.f_back = prior
def simulate_call(self, name):
code = self.fake_code('profile', 0, name)
if self.cur:
pframe = self.cur[-2]
else:
pframe = None
frame = self.fake_frame(code, pframe)
a = self.dispatch['call'](frame, 0)
return
class fake_frame:
def __init__(self, code, prior):
self.f_code = code
self.f_back = prior
# collect stats from pending stack, including getting final
# timings for self.cmd frame.
def simulate_cmd_complete(self):
t = self.get_time() - self.t
while self.cur[-1]:
# We *can* cause assertion errors here if
# dispatch_trace_return checks for a frame match!
a = self.dispatch['return'](self.cur[-2], t)
t = 0
self.t = self.get_time() - t
def simulate_call(self, name):
code = self.fake_code('profile', 0, name)
if self.cur:
pframe = self.cur[-2]
else:
pframe = None
frame = self.fake_frame(code, pframe)
a = self.dispatch['call'](frame, 0)
return
def print_stats(self):
import pstats
pstats.Stats(self).strip_dirs().sort_stats(-1). \
print_stats()
# collect stats from pending stack, including getting final
# timings for self.cmd frame.
def dump_stats(self, file):
f = open(file, 'wb')
self.create_stats()
marshal.dump(self.stats, f)
f.close()
def create_stats(self):
self.simulate_cmd_complete()
self.snapshot_stats()
def snapshot_stats(self):
self.stats = {}
for func in self.timings.keys():
cc, ns, tt, ct, callers = self.timings[func]
callers = callers.copy()
nc = 0
for func_caller in callers.keys():
nc = nc + callers[func_caller]
self.stats[func] = cc, nc, tt, ct, callers
def simulate_cmd_complete(self):
t = self.get_time() - self.t
while self.cur[-1]:
# We *can* cause assertion errors here if
# dispatch_trace_return checks for a frame match!
a = self.dispatch['return'](self.cur[-2], t)
t = 0
self.t = self.get_time() - t
# The following two methods can be called by clients to use
# a profiler to profile a statement, given as a string.
def run(self, cmd):
import __main__
dict = __main__.__dict__
return self.runctx(cmd, dict, dict)
def runctx(self, cmd, globals, locals):
self.set_cmd(cmd)
sys.setprofile(self.dispatcher)
try:
exec cmd in globals, locals
finally:
sys.setprofile(None)
return self
def print_stats(self):
import pstats
pstats.Stats(self).strip_dirs().sort_stats(-1). \
print_stats()
# This method is more useful to profile a single function call.
def runcall(self, func, *args):
self.set_cmd(`func`)
sys.setprofile(self.dispatcher)
try:
return apply(func, args)
finally:
sys.setprofile(None)
def dump_stats(self, file):
f = open(file, 'wb')
self.create_stats()
marshal.dump(self.stats, f)
f.close()
def create_stats(self):
self.simulate_cmd_complete()
self.snapshot_stats()
def snapshot_stats(self):
self.stats = {}
for func in self.timings.keys():
cc, ns, tt, ct, callers = self.timings[func]
callers = callers.copy()
nc = 0
for func_caller in callers.keys():
nc = nc + callers[func_caller]
self.stats[func] = cc, nc, tt, ct, callers
#******************************************************************
# The following calculates the overhead for using a profiler. The
# problem is that it takes a fair amount of time for the profiler
# to stop the stopwatch (from the time it receives an event).
# Similarly, there is a delay from the time that the profiler
# re-starts the stopwatch before the user's code really gets to
# continue. The following code tries to measure the difference on
# a per-event basis. The result can then be placed in the
# Profile.dispatch_event() routine for the given platform. Note
# that this difference is only significant if there are a lot of
# events, and relatively little user code per event. For example,
# code with small functions will typically benefit from having the
# profiler calibrated for the current platform. This *could* be
# done on the fly during init() time, but it is not worth the
# effort. Also note that if too large a value is specified, then
# execution time on some functions will actually appear as a
# negative number. It is *normal* for some functions (with very
# low call counts) to have such negative stats, even if the
# calibration figure is "correct."
#
# One alternative to profile-time calibration adjustments (i.e.,
# adding in the magic little delta during each event) is to track
# more carefully the number of events (and cumulatively, the number
# of events during sub functions) that are seen. If this were
# done, then the arithmetic could be done after the fact (i.e., at
# display time). Currently, we track only call/return events.
# These values can be deduced by examining the callees and callers
# vectors for each function. Hence we *can* almost correct the
# internal time figure at print time (note that we currently don't
# track exception event processing counts). Unfortunately, there
# is currently no similar information for cumulative sub-function
# time. It would not be hard to "get all this info" at profiler
# time. Specifically, we would have to extend the tuples to keep
# counts of this in each frame, and then extend the defs of timing
# tuples to include the significant two figures. I'm a bit fearful
# that this additional feature will slow the heavily optimized
# event/time ratio (i.e., the profiler would run slower, for a very
# low "value added" feature.)
#
# Plugging in the calibration constant doesn't slow down the
# profiler very much, and the accuracy goes way up.
#**************************************************************
def calibrate(self, m):
# Modified by Tim Peters
n = m
s = self.get_time()
while n:
self.simple()
n = n - 1
f = self.get_time()
my_simple = f - s
#print "Simple =", my_simple,
# The following two methods can be called by clients to use
# a profiler to profile a statement, given as a string.
n = m
s = self.get_time()
while n:
self.instrumented()
n = n - 1
f = self.get_time()
my_inst = f - s
# print "Instrumented =", my_inst
avg_cost = (my_inst - my_simple)/m
#print "Delta/call =", avg_cost, "(profiler fixup constant)"
return avg_cost
def run(self, cmd):
import __main__
dict = __main__.__dict__
return self.runctx(cmd, dict, dict)
# simulate a program with no profiler activity
def simple(self):
a = 1
pass
def runctx(self, cmd, globals, locals):
self.set_cmd(cmd)
sys.setprofile(self.dispatcher)
try:
exec cmd in globals, locals
finally:
sys.setprofile(None)
return self
# simulate a program with call/return event processing
def instrumented(self):
a = 1
self.profiler_simulation(a, a, a)
# This method is more useful to profile a single function call.
def runcall(self, func, *args):
self.set_cmd(`func`)
sys.setprofile(self.dispatcher)
try:
return apply(func, args)
finally:
sys.setprofile(None)
# simulate an event processing activity (from user's perspective)
def profiler_simulation(self, x, y, z):
t = self.timer()
## t = t[0] + t[1]
self.ut = t
#******************************************************************
# The following calculates the overhead for using a profiler. The
# problem is that it takes a fair amount of time for the profiler
# to stop the stopwatch (from the time it receives an event).
# Similarly, there is a delay from the time that the profiler
# re-starts the stopwatch before the user's code really gets to
# continue. The following code tries to measure the difference on
# a per-event basis. The result can then be placed in the
# Profile.dispatch_event() routine for the given platform. Note
# that this difference is only significant if there are a lot of
# events, and relatively little user code per event. For example,
# code with small functions will typically benefit from having the
# profiler calibrated for the current platform. This *could* be
# done on the fly during init() time, but it is not worth the
# effort. Also note that if too large a value is specified, then
# execution time on some functions will actually appear as a
# negative number. It is *normal* for some functions (with very
# low call counts) to have such negative stats, even if the
# calibration figure is "correct."
#
# One alternative to profile-time calibration adjustments (i.e.,
# adding in the magic little delta during each event) is to track
# more carefully the number of events (and cumulatively, the number
# of events during sub functions) that are seen. If this were
# done, then the arithmetic could be done after the fact (i.e., at
# display time). Currently, we track only call/return events.
# These values can be deduced by examining the callees and callers
# vectors for each function. Hence we *can* almost correct the
# internal time figure at print time (note that we currently don't
# track exception event processing counts). Unfortunately, there
# is currently no similar information for cumulative sub-function
# time. It would not be hard to "get all this info" at profiler
# time. Specifically, we would have to extend the tuples to keep
# counts of this in each frame, and then extend the defs of timing
# tuples to include the significant two figures. I'm a bit fearful
# that this additional feature will slow the heavily optimized
# event/time ratio (i.e., the profiler would run slower, for a very
# low "value added" feature.)
#
# Plugging in the calibration constant doesn't slow down the
# profiler very much, and the accuracy goes way up.
#**************************************************************
def calibrate(self, m):
# Modified by Tim Peters
n = m
s = self.get_time()
while n:
self.simple()
n = n - 1
f = self.get_time()
my_simple = f - s
#print "Simple =", my_simple,
n = m
s = self.get_time()
while n:
self.instrumented()
n = n - 1
f = self.get_time()
my_inst = f - s
# print "Instrumented =", my_inst
avg_cost = (my_inst - my_simple)/m
#print "Delta/call =", avg_cost, "(profiler fixup constant)"
return avg_cost
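A rough sketch of how the overhead discussed in the comment block above might be measured; the iteration count is arbitrary and the resulting constant varies by machine and from run to run, so treat it as an estimate to fold into the dispatch routine by hand:

import profile

p = profile.Profile()
avg = p.calibrate(10000)     # approximate profiler overhead per simulated event, in timer units
print 'per-event overhead:', avg
# the commented-out calibration constant in trace_dispatch() above is where a
# value of this size would be subtracted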
# simulate a program with no profiler activity
def simple(self):
a = 1
pass
# simulate a program with call/return event processing
def instrumented(self):
a = 1
self.profiler_simulation(a, a, a)
# simulate an event processing activity (from user's perspective)
def profiler_simulation(self, x, y, z):
t = self.timer()
## t = t[0] + t[1]
self.ut = t
class OldProfile(Profile):
"""A derived profiler that simulates the old style profile, providing
errant results on recursive functions. The reason for the usefulness of
this profiler is that it runs faster (i.e., less overhead). It still
creates all the caller stats, and is quite useful when there is *no*
recursion in the user's code.
This code also shows how easy it is to create a modified profiler.
"""
"""A derived profiler that simulates the old style profile, providing
errant results on recursive functions. The reason for the usefulness of
this profiler is that it runs faster (i.e., less overhead). It still
creates all the caller stats, and is quite useful when there is *no*
recursion in the user's code.
def trace_dispatch_exception(self, frame, t):
rt, rtt, rct, rfn, rframe, rcur = self.cur
if rcur and not rframe is frame:
return self.trace_dispatch_return(rframe, t)
return 0
This code also shows how easy it is to create a modified profiler.
"""
def trace_dispatch_call(self, frame, t):
fn = `frame.f_code`
self.cur = (t, 0, 0, fn, frame, self.cur)
if self.timings.has_key(fn):
tt, ct, callers = self.timings[fn]
self.timings[fn] = tt, ct, callers
else:
self.timings[fn] = 0, 0, {}
return 1
def trace_dispatch_exception(self, frame, t):
rt, rtt, rct, rfn, rframe, rcur = self.cur
if rcur and not rframe is frame:
return self.trace_dispatch_return(rframe, t)
return 0
def trace_dispatch_return(self, frame, t):
rt, rtt, rct, rfn, frame, rcur = self.cur
rtt = rtt + t
sft = rtt + rct
def trace_dispatch_call(self, frame, t):
fn = `frame.f_code`
pt, ptt, pct, pfn, pframe, pcur = rcur
self.cur = pt, ptt+rt, pct+sft, pfn, pframe, pcur
self.cur = (t, 0, 0, fn, frame, self.cur)
if self.timings.has_key(fn):
tt, ct, callers = self.timings[fn]
self.timings[fn] = tt, ct, callers
else:
self.timings[fn] = 0, 0, {}
return 1
tt, ct, callers = self.timings[rfn]
if callers.has_key(pfn):
callers[pfn] = callers[pfn] + 1
else:
callers[pfn] = 1
self.timings[rfn] = tt+rtt, ct + sft, callers
def trace_dispatch_return(self, frame, t):
rt, rtt, rct, rfn, frame, rcur = self.cur
rtt = rtt + t
sft = rtt + rct
return 1
pt, ptt, pct, pfn, pframe, pcur = rcur
self.cur = pt, ptt+rt, pct+sft, pfn, pframe, pcur
tt, ct, callers = self.timings[rfn]
if callers.has_key(pfn):
callers[pfn] = callers[pfn] + 1
else:
callers[pfn] = 1
self.timings[rfn] = tt+rtt, ct + sft, callers
return 1
def snapshot_stats(self):
self.stats = {}
for func in self.timings.keys():
tt, ct, callers = self.timings[func]
callers = callers.copy()
nc = 0
for func_caller in callers.keys():
nc = nc + callers[func_caller]
self.stats[func] = nc, nc, tt, ct, callers
def snapshot_stats(self):
self.stats = {}
for func in self.timings.keys():
tt, ct, callers = self.timings[func]
callers = callers.copy()
nc = 0
for func_caller in callers.keys():
nc = nc + callers[func_caller]
self.stats[func] = nc, nc, tt, ct, callers
class HotProfile(Profile):
"""The fastest derived profile example. It does not calculate
caller-callee relationships, and does not calculate cumulative
time under a function. It only calculates time spent in a
function, so it runs very quickly due to its very low overhead.
"""
"""The fastest derived profile example. It does not calculate
caller-callee relationships, and does not calculate cumulative
time under a function. It only calculates time spent in a
function, so it runs very quickly due to its very low overhead.
"""
def trace_dispatch_exception(self, frame, t):
rt, rtt, rfn, rframe, rcur = self.cur
if rcur and not rframe is frame:
return self.trace_dispatch_return(rframe, t)
return 0
def trace_dispatch_exception(self, frame, t):
rt, rtt, rfn, rframe, rcur = self.cur
if rcur and not rframe is frame:
return self.trace_dispatch_return(rframe, t)
return 0
def trace_dispatch_call(self, frame, t):
self.cur = (t, 0, frame, self.cur)
return 1
def trace_dispatch_call(self, frame, t):
self.cur = (t, 0, frame, self.cur)
return 1
def trace_dispatch_return(self, frame, t):
rt, rtt, frame, rcur = self.cur
def trace_dispatch_return(self, frame, t):
rt, rtt, frame, rcur = self.cur
rfn = `frame.f_code`
rfn = `frame.f_code`
pt, ptt, pframe, pcur = rcur
self.cur = pt, ptt+rt, pframe, pcur
pt, ptt, pframe, pcur = rcur
self.cur = pt, ptt+rt, pframe, pcur
if self.timings.has_key(rfn):
nc, tt = self.timings[rfn]
self.timings[rfn] = nc + 1, rt + rtt + tt
else:
self.timings[rfn] = 1, rt + rtt
if self.timings.has_key(rfn):
nc, tt = self.timings[rfn]
self.timings[rfn] = nc + 1, rt + rtt + tt
else:
self.timings[rfn] = 1, rt + rtt
return 1
return 1
def snapshot_stats(self):
self.stats = {}
for func in self.timings.keys():
nc, tt = self.timings[func]
self.stats[func] = nc, nc, tt, 0, {}
def snapshot_stats(self):
self.stats = {}
for func in self.timings.keys():
nc, tt = self.timings[func]
self.stats[func] = nc, nc, tt, 0, {}
#****************************************************************************
def Stats(*args):
print 'Report generating functions are in the "pstats" module\a'
print 'Report generating functions are in the "pstats" module\a'
# When invoked as main program, invoke the profiler on a script
if __name__ == '__main__':
import sys
import os
if not sys.argv[1:]:
print "usage: profile.py scriptfile [arg] ..."
sys.exit(2)
import sys
import os
if not sys.argv[1:]:
print "usage: profile.py scriptfile [arg] ..."
sys.exit(2)
filename = sys.argv[1] # Get script filename
filename = sys.argv[1] # Get script filename
del sys.argv[0] # Hide "profile.py" from argument list
del sys.argv[0] # Hide "profile.py" from argument list
# Insert script directory in front of module search path
sys.path.insert(0, os.path.dirname(filename))
# Insert script directory in front of module search path
sys.path.insert(0, os.path.dirname(filename))
run('execfile(' + `filename` + ')')
run('execfile(' + `filename` + ')')

View File

@ -9,7 +9,7 @@
# Copyright 1994, by InfoSeek Corporation, all rights reserved.
# Written by James Roskind
#
#
# Permission to use, copy, modify, and distribute this Python software
# and its associated documentation for any purpose (subject to the
# restriction in the following sentence) without fee is hereby granted,
@ -22,7 +22,7 @@
# to remain in Python, compiled Python, or other languages (such as C)
# wherein the modified or derived code is exclusively imported into a
# Python module.
#
#
# INFOSEEK CORPORATION DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS. IN NO EVENT SHALL INFOSEEK CORPORATION BE LIABLE FOR ANY
@ -41,486 +41,485 @@ import re
import fpformat
class Stats:
"""This class is used for creating reports from data generated by the
Profile class. It is a "friend" of that class, and imports data either
by direct access to members of Profile class, or by reading in a dictionary
that was emitted (via marshal) from the Profile class.
"""This class is used for creating reports from data generated by the
Profile class. It is a "friend" of that class, and imports data either
by direct access to members of Profile class, or by reading in a dictionary
that was emitted (via marshal) from the Profile class.
The big change from the previous Profiler (in terms of raw functionality)
is that an "add()" method has been provided to combine Stats from
several distinct profile runs. Both the constructor and the add()
method now take arbitrarily many file names as arguments.
The big change from the previous Profiler (in terms of raw functionality)
is that an "add()" method has been provided to combine Stats from
several distinct profile runs. Both the constructor and the add()
method now take arbitrarily many file names as arguments.
All the print methods now take an argument that indicates how many lines
to print. If the arg is a floating point number between 0 and 1.0, then
it is taken as a decimal percentage of the available lines to be printed
(e.g., .1 means print 10% of all available lines). If it is an integer,
it is taken to mean the number of lines of data that you wish to have
printed.
All the print methods now take an argument that indicates how many lines
to print. If the arg is a floating point number between 0 and 1.0, then
it is taken as a decimal percentage of the available lines to be printed
(e.g., .1 means print 10% of all available lines). If it is an integer,
it is taken to mean the number of lines of data that you wish to have
printed.
The sort_stats() method now processes some additional options (i.e., in
addition to the old -1, 0, 1, or 2). It takes an arbitrary number of quoted
strings to select the sort order. For example sort_stats('time', 'name')
sorts on the major key of "internal function time", and on the minor
key of 'the name of the function'. Look at the two tables in sort_stats()
and get_sort_arg_defs(self) for more examples.
The sort_stats() method now processes some additional options (i.e., in
addition to the old -1, 0, 1, or 2). It takes an arbitrary number of quoted
strings to select the sort order. For example sort_stats('time', 'name')
sorts on the major key of "internal function time", and on the minor
key of 'the name of the function'. Look at the two tables in sort_stats()
and get_sort_arg_defs(self) for more examples.
All methods now return "self", so you can string together commands like:
Stats('foo', 'goo').strip_dirs().sort_stats('calls').\
print_stats(5).print_callers(5)
"""
def __init__(self, *args):
if not len(args):
arg = None
else:
arg = args[0]
args = args[1:]
self.init(arg)
apply(self.add, args).ignore()
def init(self, arg):
self.all_callees = None # calc only if needed
self.files = []
self.fcn_list = None
self.total_tt = 0
self.total_calls = 0
self.prim_calls = 0
self.max_name_len = 0
self.top_level = {}
self.stats = {}
self.sort_arg_dict = {}
self.load_stats(arg)
trouble = 1
try:
self.get_top_level_stats()
trouble = 0
finally:
if trouble:
print "Invalid timing data",
if self.files: print self.files[-1],
print
All methods now return "self", so you can string together commands like:
Stats('foo', 'goo').strip_dirs().sort_stats('calls').\
print_stats(5).print_callers(5)
"""
def __init__(self, *args):
if not len(args):
arg = None
else:
arg = args[0]
args = args[1:]
self.init(arg)
apply(self.add, args).ignore()
def init(self, arg):
self.all_callees = None # calc only if needed
self.files = []
self.fcn_list = None
self.total_tt = 0
self.total_calls = 0
self.prim_calls = 0
self.max_name_len = 0
self.top_level = {}
self.stats = {}
self.sort_arg_dict = {}
self.load_stats(arg)
trouble = 1
try:
self.get_top_level_stats()
trouble = 0
finally:
if trouble:
print "Invalid timing data",
if self.files: print self.files[-1],
print
def load_stats(self, arg):
if not arg: self.stats = {}
elif type(arg) == type(""):
f = open(arg, 'rb')
self.stats = marshal.load(f)
f.close()
try:
file_stats = os.stat(arg)
arg = time.ctime(file_stats[8]) + " " + arg
except: # in case this is not unix
pass
self.files = [ arg ]
elif hasattr(arg, 'create_stats'):
arg.create_stats()
self.stats = arg.stats
arg.stats = {}
if not self.stats:
raise TypeError, "Cannot create or construct a " \
+ `self.__class__` \
+ " object from '" + `arg` + "'"
return
def load_stats(self, arg):
if not arg: self.stats = {}
elif type(arg) == type(""):
f = open(arg, 'rb')
self.stats = marshal.load(f)
f.close()
try:
file_stats = os.stat(arg)
arg = time.ctime(file_stats[8]) + " " + arg
except: # in case this is not unix
pass
self.files = [ arg ]
elif hasattr(arg, 'create_stats'):
arg.create_stats()
self.stats = arg.stats
arg.stats = {}
if not self.stats:
raise TypeError, "Cannot create or construct a " \
+ `self.__class__` \
+ " object from '" + `arg` + "'"
return
def get_top_level_stats(self):
for func in self.stats.keys():
cc, nc, tt, ct, callers = self.stats[func]
self.total_calls = self.total_calls + nc
self.prim_calls = self.prim_calls + cc
self.total_tt = self.total_tt + tt
if callers.has_key(("jprofile", 0, "profiler")):
self.top_level[func] = None
if len(func_std_string(func)) > self.max_name_len:
self.max_name_len = len(func_std_string(func))
def add(self, *arg_list):
if not arg_list: return self
if len(arg_list) > 1: apply(self.add, arg_list[1:])
other = arg_list[0]
if type(self) != type(other) or \
self.__class__ != other.__class__:
other = Stats(other)
self.files = self.files + other.files
self.total_calls = self.total_calls + other.total_calls
self.prim_calls = self.prim_calls + other.prim_calls
self.total_tt = self.total_tt + other.total_tt
for func in other.top_level.keys():
self.top_level[func] = None
def get_top_level_stats(self):
for func in self.stats.keys():
cc, nc, tt, ct, callers = self.stats[func]
self.total_calls = self.total_calls + nc
self.prim_calls = self.prim_calls + cc
self.total_tt = self.total_tt + tt
if callers.has_key(("jprofile", 0, "profiler")):
self.top_level[func] = None
if len(func_std_string(func)) > self.max_name_len:
self.max_name_len = len(func_std_string(func))
if self.max_name_len < other.max_name_len:
self.max_name_len = other.max_name_len
def add(self, *arg_list):
if not arg_list: return self
if len(arg_list) > 1: apply(self.add, arg_list[1:])
other = arg_list[0]
if type(self) != type(other) or \
self.__class__ != other.__class__:
other = Stats(other)
self.files = self.files + other.files
self.total_calls = self.total_calls + other.total_calls
self.prim_calls = self.prim_calls + other.prim_calls
self.total_tt = self.total_tt + other.total_tt
for func in other.top_level.keys():
self.top_level[func] = None
self.fcn_list = None
if self.max_name_len < other.max_name_len:
self.max_name_len = other.max_name_len
for func in other.stats.keys():
if self.stats.has_key(func):
old_func_stat = self.stats[func]
else:
old_func_stat = (0, 0, 0, 0, {},)
self.stats[func] = add_func_stats(old_func_stat, \
other.stats[func])
return self
self.fcn_list = None
# list the tuple indices and directions for sorting,
# along with some printable description
sort_arg_dict_default = {\
"calls" : (((1,-1), ), "call count"),\
"cumulative": (((3,-1), ), "cumulative time"),\
"file" : (((4, 1), ), "file name"),\
"line" : (((5, 1), ), "line number"),\
"module" : (((4, 1), ), "file name"),\
"name" : (((6, 1), ), "function name"),\
"nfl" : (((6, 1),(4, 1),(5, 1),), "name/file/line"), \
"pcalls" : (((0,-1), ), "call count"),\
"stdname" : (((7, 1), ), "standard name"),\
"time" : (((2,-1), ), "internal time"),\
}
def get_sort_arg_defs(self):
"""Expand all abbreviations that are unique."""
if not self.sort_arg_dict:
self.sort_arg_dict = dict = {}
std_list = dict.keys()
bad_list = {}
for word in self.sort_arg_dict_default.keys():
fragment = word
while fragment:
if not fragment:
break
if dict.has_key(fragment):
bad_list[fragment] = 0
break
dict[fragment] = self. \
sort_arg_dict_default[word]
fragment = fragment[:-1]
for word in bad_list.keys():
del dict[word]
return self.sort_arg_dict
def sort_stats(self, *field):
if not field:
self.fcn_list = 0
return self
if len(field) == 1 and type(field[0]) == type(1):
# Be compatible with old profiler
field = [ {-1: "stdname", \
0:"calls", \
1:"time", \
2: "cumulative" } [ field[0] ] ]
sort_arg_defs = self.get_sort_arg_defs()
sort_tuple = ()
self.sort_type = ""
connector = ""
for word in field:
sort_tuple = sort_tuple + sort_arg_defs[word][0]
self.sort_type = self.sort_type + connector + \
sort_arg_defs[word][1]
connector = ", "
stats_list = []
for func in self.stats.keys():
cc, nc, tt, ct, callers = self.stats[func]
stats_list.append((cc, nc, tt, ct) + func_split(func) \
+ (func_std_string(func), func,) )
stats_list.sort(TupleComp(sort_tuple).compare)
self.fcn_list = fcn_list = []
for tuple in stats_list:
fcn_list.append(tuple[-1])
return self
def reverse_order(self):
if self.fcn_list: self.fcn_list.reverse()
return self
def strip_dirs(self):
oldstats = self.stats
self.stats = newstats = {}
max_name_len = 0
for func in oldstats.keys():
cc, nc, tt, ct, callers = oldstats[func]
newfunc = func_strip_path(func)
if len(func_std_string(newfunc)) > max_name_len:
max_name_len = len(func_std_string(newfunc))
newcallers = {}
for func2 in callers.keys():
newcallers[func_strip_path(func2)] = \
callers[func2]
if newstats.has_key(newfunc):
newstats[newfunc] = add_func_stats( \
newstats[newfunc],\
(cc, nc, tt, ct, newcallers))
else:
newstats[newfunc] = (cc, nc, tt, ct, newcallers)
old_top = self.top_level
self.top_level = new_top = {}
for func in old_top.keys():
new_top[func_strip_path(func)] = None
self.max_name_len = max_name_len
self.fcn_list = None
self.all_callees = None
return self
for func in other.stats.keys():
if self.stats.has_key(func):
old_func_stat = self.stats[func]
else:
old_func_stat = (0, 0, 0, 0, {},)
self.stats[func] = add_func_stats(old_func_stat, \
other.stats[func])
return self
def calc_callees(self):
if self.all_callees: return
self.all_callees = all_callees = {}
for func in self.stats.keys():
if not all_callees.has_key(func):
all_callees[func] = {}
cc, nc, tt, ct, callers = self.stats[func]
for func2 in callers.keys():
if not all_callees.has_key(func2):
all_callees[func2] = {}
all_callees[func2][func] = callers[func2]
return
# list the tuple indices and directions for sorting,
# along with some printable description
sort_arg_dict_default = {\
"calls" : (((1,-1), ), "call count"),\
"cumulative": (((3,-1), ), "cumulative time"),\
"file" : (((4, 1), ), "file name"),\
"line" : (((5, 1), ), "line number"),\
"module" : (((4, 1), ), "file name"),\
"name" : (((6, 1), ), "function name"),\
"nfl" : (((6, 1),(4, 1),(5, 1),), "name/file/line"), \
"pcalls" : (((0,-1), ), "call count"),\
"stdname" : (((7, 1), ), "standard name"),\
"time" : (((2,-1), ), "internal time"),\
}
#******************************************************************
# The following functions support actual printing of reports
#******************************************************************
def get_sort_arg_defs(self):
"""Expand all abbreviations that are unique."""
if not self.sort_arg_dict:
self.sort_arg_dict = dict = {}
std_list = dict.keys()
bad_list = {}
for word in self.sort_arg_dict_default.keys():
fragment = word
while fragment:
if not fragment:
break
if dict.has_key(fragment):
bad_list[fragment] = 0
break
dict[fragment] = self. \
sort_arg_dict_default[word]
fragment = fragment[:-1]
for word in bad_list.keys():
del dict[word]
return self.sort_arg_dict
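As an illustration of the expansion above, any unambiguous prefix of a key works as a sort argument, while a prefix shared by two keys is dropped from the table; the file name is a placeholder:

import pstats

s = pstats.Stats('foo')      # placeholder dump file
s.sort_stats('cum')          # accepted: only 'cumulative' starts with 'cum'
s.print_stats(5)
# s.sort_stats('c') would raise KeyError: 'c' could abbreviate 'calls' or
# 'cumulative', so that fragment is removed from the expansion table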
# Optional "amount" is either a line count, or a percentage of lines.
def eval_print_amount(self, sel, list, msg):
new_list = list
if type(sel) == type(""):
new_list = []
for func in list:
if re.search(sel, func_std_string(func)):
new_list.append(func)
else:
count = len(list)
if type(sel) == type(1.0) and 0.0 <= sel < 1.0:
count = int (count * sel + .5)
new_list = list[:count]
elif type(sel) == type(1) and 0 <= sel < count:
count = sel
new_list = list[:count]
if len(list) != len(new_list):
msg = msg + " List reduced from " + `len(list)` \
+ " to " + `len(new_list)` + \
" due to restriction <" + `sel` + ">\n"
return new_list, msg
def sort_stats(self, *field):
if not field:
self.fcn_list = 0
return self
if len(field) == 1 and type(field[0]) == type(1):
# Be compatible with old profiler
field = [ {-1: "stdname", \
0:"calls", \
1:"time", \
2: "cumulative" } [ field[0] ] ]
sort_arg_defs = self.get_sort_arg_defs()
sort_tuple = ()
self.sort_type = ""
connector = ""
for word in field:
sort_tuple = sort_tuple + sort_arg_defs[word][0]
self.sort_type = self.sort_type + connector + \
sort_arg_defs[word][1]
connector = ", "
stats_list = []
for func in self.stats.keys():
cc, nc, tt, ct, callers = self.stats[func]
stats_list.append((cc, nc, tt, ct) + func_split(func) \
+ (func_std_string(func), func,) )
stats_list.sort(TupleComp(sort_tuple).compare)
self.fcn_list = fcn_list = []
for tuple in stats_list:
fcn_list.append(tuple[-1])
return self
def reverse_order(self):
if self.fcn_list: self.fcn_list.reverse()
return self
def strip_dirs(self):
oldstats = self.stats
self.stats = newstats = {}
max_name_len = 0
for func in oldstats.keys():
cc, nc, tt, ct, callers = oldstats[func]
newfunc = func_strip_path(func)
if len(func_std_string(newfunc)) > max_name_len:
max_name_len = len(func_std_string(newfunc))
newcallers = {}
for func2 in callers.keys():
newcallers[func_strip_path(func2)] = \
callers[func2]
if newstats.has_key(newfunc):
newstats[newfunc] = add_func_stats( \
newstats[newfunc],\
(cc, nc, tt, ct, newcallers))
else:
newstats[newfunc] = (cc, nc, tt, ct, newcallers)
old_top = self.top_level
self.top_level = new_top = {}
for func in old_top.keys():
new_top[func_strip_path(func)] = None
self.max_name_len = max_name_len
self.fcn_list = None
self.all_callees = None
return self
def get_print_list(self, sel_list):
width = self.max_name_len
if self.fcn_list:
list = self.fcn_list[:]
msg = " Ordered by: " + self.sort_type + '\n'
else:
list = self.stats.keys()
msg = " Random listing order was used\n"
def calc_callees(self):
if self.all_callees: return
self.all_callees = all_callees = {}
for func in self.stats.keys():
if not all_callees.has_key(func):
all_callees[func] = {}
cc, nc, tt, ct, callers = self.stats[func]
for func2 in callers.keys():
if not all_callees.has_key(func2):
all_callees[func2] = {}
all_callees[func2][func] = callers[func2]
return
for selection in sel_list:
list,msg = self.eval_print_amount(selection, list, msg)
#******************************************************************
# The following functions support actual printing of reports
#******************************************************************
count = len(list)
# Optional "amount" is either a line count, or a percentage of lines.
if not list:
return 0, list
print msg
if count < len(self.stats):
width = 0
for func in list:
if len(func_std_string(func)) > width:
width = len(func_std_string(func))
return width+2, list
def print_stats(self, *amount):
for filename in self.files:
print filename
if self.files: print
indent = " "
for func in self.top_level.keys():
print indent, func_get_function_name(func)
print indent, self.total_calls, "function calls",
if self.total_calls != self.prim_calls:
print "(" + `self.prim_calls`, "primitive calls)",
print "in", fpformat.fix(self.total_tt, 3), "CPU seconds"
print
width, list = self.get_print_list(amount)
if list:
self.print_title()
for func in list:
self.print_line(func)
print
print
return self
def eval_print_amount(self, sel, list, msg):
new_list = list
if type(sel) == type(""):
new_list = []
for func in list:
if re.search(sel, func_std_string(func)):
new_list.append(func)
else:
count = len(list)
if type(sel) == type(1.0) and 0.0 <= sel < 1.0:
count = int (count * sel + .5)
new_list = list[:count]
elif type(sel) == type(1) and 0 <= sel < count:
count = sel
new_list = list[:count]
if len(list) != len(new_list):
msg = msg + " List reduced from " + `len(list)` \
+ " to " + `len(new_list)` + \
" due to restriction <" + `sel` + ">\n"
def print_callees(self, *amount):
width, list = self.get_print_list(amount)
if list:
self.calc_callees()
self.print_call_heading(width, "called...")
for func in list:
if self.all_callees.has_key(func):
self.print_call_line(width, \
func, self.all_callees[func])
else:
self.print_call_line(width, func, {})
print
print
return self
def print_callers(self, *amount):
width, list = self.get_print_list(amount)
if list:
self.print_call_heading(width, "was called by...")
for func in list:
cc, nc, tt, ct, callers = self.stats[func]
self.print_call_line(width, func, callers)
print
print
return self
def print_call_heading(self, name_size, column_title):
print string.ljust("Function ", name_size) + column_title
def print_call_line(self, name_size, source, call_dict):
print string.ljust(func_std_string(source), name_size),
if not call_dict:
print "--"
return
clist = call_dict.keys()
clist.sort()
name_size = name_size + 1
indent = ""
for func in clist:
name = func_std_string(func)
print indent*name_size + name + '(' \
+ `call_dict[func]`+')', \
f8(self.stats[func][3])
indent = " "
return new_list, msg
def print_title(self):
print string.rjust('ncalls', 9),
print string.rjust('tottime', 8),
print string.rjust('percall', 8),
print string.rjust('cumtime', 8),
print string.rjust('percall', 8),
print 'filename:lineno(function)'
def get_print_list(self, sel_list):
width = self.max_name_len
if self.fcn_list:
list = self.fcn_list[:]
msg = " Ordered by: " + self.sort_type + '\n'
else:
list = self.stats.keys()
msg = " Random listing order was used\n"
for selection in sel_list:
list,msg = self.eval_print_amount(selection, list, msg)
count = len(list)
if not list:
return 0, list
print msg
if count < len(self.stats):
width = 0
for func in list:
if len(func_std_string(func)) > width:
width = len(func_std_string(func))
return width+2, list
def print_stats(self, *amount):
for filename in self.files:
print filename
if self.files: print
indent = " "
for func in self.top_level.keys():
print indent, func_get_function_name(func)
print indent, self.total_calls, "function calls",
if self.total_calls != self.prim_calls:
print "(" + `self.prim_calls`, "primitive calls)",
print "in", fpformat.fix(self.total_tt, 3), "CPU seconds"
print
width, list = self.get_print_list(amount)
if list:
self.print_title()
for func in list:
self.print_line(func)
print
print
return self
def print_line(self, func): # hack : should print percentages
cc, nc, tt, ct, callers = self.stats[func]
c = `nc`
if nc != cc:
c = c + '/' + `cc`
print string.rjust(c, 9),
print f8(tt),
if nc == 0:
print ' '*8,
else:
print f8(tt/nc),
print f8(ct),
if cc == 0:
print ' '*8,
else:
print f8(ct/cc),
print func_std_string(func)
def print_callees(self, *amount):
width, list = self.get_print_list(amount)
if list:
self.calc_callees()
self.print_call_heading(width, "called...")
for func in list:
if self.all_callees.has_key(func):
self.print_call_line(width, \
func, self.all_callees[func])
else:
self.print_call_line(width, func, {})
print
print
return self
def print_callers(self, *amount):
width, list = self.get_print_list(amount)
if list:
self.print_call_heading(width, "was called by...")
for func in list:
cc, nc, tt, ct, callers = self.stats[func]
self.print_call_line(width, func, callers)
print
print
return self
def print_call_heading(self, name_size, column_title):
print string.ljust("Function ", name_size) + column_title
def ignore(self):
pass # has no return value, so use at end of line :-)
def print_call_line(self, name_size, source, call_dict):
print string.ljust(func_std_string(source), name_size),
if not call_dict:
print "--"
return
clist = call_dict.keys()
clist.sort()
name_size = name_size + 1
indent = ""
for func in clist:
name = func_std_string(func)
print indent*name_size + name + '(' \
+ `call_dict[func]`+')', \
f8(self.stats[func][3])
indent = " "
def print_title(self):
print string.rjust('ncalls', 9),
print string.rjust('tottime', 8),
print string.rjust('percall', 8),
print string.rjust('cumtime', 8),
print string.rjust('percall', 8),
print 'filename:lineno(function)'
def print_line(self, func): # hack : should print percentages
cc, nc, tt, ct, callers = self.stats[func]
c = `nc`
if nc != cc:
c = c + '/' + `cc`
print string.rjust(c, 9),
print f8(tt),
if nc == 0:
print ' '*8,
else:
print f8(tt/nc),
print f8(ct),
if cc == 0:
print ' '*8,
else:
print f8(ct/cc),
print func_std_string(func)
def ignore(self):
pass # has no return value, so use at end of line :-)
class TupleComp:
"""This class provides a generic function for comparing any two tuples.
Each instance records a list of tuple-indices (from most significant
to least significant), and a sort direction (ascending or descending) for
each tuple-index.  The compare method can then be used as the function
argument to the system sort() function when a list of tuples needs to be
sorted in the instance's order."""
"""This class provides a generic function for comparing any two tuples.
Each instance records a list of tuple-indices (from most significant
to least significant), and a sort direction (ascending or descending) for
each tuple-index.  The compare method can then be used as the function
argument to the system sort() function when a list of tuples needs to be
sorted in the instance's order."""
def __init__(self, comp_select_list):
self.comp_select_list = comp_select_list
def __init__(self, comp_select_list):
self.comp_select_list = comp_select_list
def compare (self, left, right):
for index, direction in self.comp_select_list:
l = left[index]
r = right[index]
if l < r:
return -direction
if l > r:
return direction
return 0
def compare (self, left, right):
for index, direction in self.comp_select_list:
l = left[index]
r = right[index]
if l < r:
return -direction
if l > r:
return direction
return 0
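A tiny standalone sketch of how TupleComp drives the builtin sort(); the data is invented purely for illustration:

    rows = [(3, 'b'), (1, 'a'), (3, 'a')]
    comp = TupleComp([(0, -1), (1, 1)])    # index 0 descending, then index 1 ascending
    rows.sort(comp.compare)
    print rows                             # [(3, 'a'), (3, 'b'), (1, 'a')]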
#**************************************************************************
def func_strip_path(func_name):
file, line, name = func_name
return os.path.basename(file), line, name
file, line, name = func_name
return os.path.basename(file), line, name
def func_get_function_name(func):
return func[2]
return func[2]
def func_std_string(func_name): # match what old profile produced
file, line, name = func_name
return file + ":" + `line` + "(" + name + ")"
file, line, name = func_name
return file + ":" + `line` + "(" + name + ")"
def func_split(func_name):
return func_name
return func_name
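All of these helpers operate on the (file, line, name) tuples that key the stats dictionary; a small illustration with a made-up entry:

    func = ('/usr/lib/python2.0/pyclbr.py', 117, 'readmodule')
    print func_strip_path(func)     # ('pyclbr.py', 117, 'readmodule')
    print func_std_string(func)     # /usr/lib/python2.0/pyclbr.py:117(readmodule)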
#**************************************************************************
# The following functions combine statistics for pairs of functions.
# The bulk of the processing involves correctly handling "call" lists,
# such as callers and callees.
# such as callers and callees.
#**************************************************************************
def add_func_stats(target, source):
"""Add together all the stats for two profile entries."""
cc, nc, tt, ct, callers = source
t_cc, t_nc, t_tt, t_ct, t_callers = target
return (cc+t_cc, nc+t_nc, tt+t_tt, ct+t_ct, \
add_callers(t_callers, callers))
"""Add together all the stats for two profile entries."""
cc, nc, tt, ct, callers = source
t_cc, t_nc, t_tt, t_ct, t_callers = target
return (cc+t_cc, nc+t_nc, tt+t_tt, ct+t_ct, \
add_callers(t_callers, callers))
def add_callers(target, source):
"""Combine two caller lists in a single list."""
new_callers = {}
for func in target.keys():
new_callers[func] = target[func]
for func in source.keys():
if new_callers.has_key(func):
new_callers[func] = source[func] + new_callers[func]
else:
new_callers[func] = source[func]
return new_callers
"""Combine two caller lists in a single list."""
new_callers = {}
for func in target.keys():
new_callers[func] = target[func]
for func in source.keys():
if new_callers.has_key(func):
new_callers[func] = source[func] + new_callers[func]
else:
new_callers[func] = source[func]
return new_callers
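Merging two caller lists simply adds the per-caller call counts; a sketch with invented keys (dictionary print order may of course vary):

    a = {('f.py', 10, 'f'): 3}
    b = {('f.py', 10, 'f'): 2, ('g.py', 5, 'g'): 1}
    print add_callers(a, b)    # {('f.py', 10, 'f'): 5, ('g.py', 5, 'g'): 1}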
def count_calls(callers):
"""Sum the caller statistics to get total number of calls received."""
nc = 0
for func in callers.keys():
nc = nc + callers[func]
return nc
"""Sum the caller statistics to get total number of calls received."""
nc = 0
for func in callers.keys():
nc = nc + callers[func]
return nc
#**************************************************************************
# The following functions support printing of reports
#**************************************************************************
def f8(x):
return string.rjust(fpformat.fix(x, 3), 8)
return string.rjust(fpformat.fix(x, 3), 8)

View File

@ -1,9 +1,9 @@
"""Pseudo terminal utilities."""
# Bugs: No signal handling. Doesn't set slave termios and window size.
# Only tested on Linux.
# See: W. Richard Stevens. 1992. Advanced Programming in the
# UNIX Environment. Chapter 19.
# Only tested on Linux.
# See: W. Richard Stevens. 1992. Advanced Programming in the
# UNIX Environment. Chapter 19.
# Author: Steen Lumholt -- with additions by Guido.
from select import select
@ -17,133 +17,133 @@ STDERR_FILENO = 2
CHILD = 0
def openpty():
"""openpty() -> (master_fd, slave_fd)
Open a pty master/slave pair, using os.openpty() if possible."""
"""openpty() -> (master_fd, slave_fd)
Open a pty master/slave pair, using os.openpty() if possible."""
try:
return os.openpty()
except (AttributeError, OSError):
pass
master_fd, slave_name = _open_terminal()
slave_fd = slave_open(slave_name)
return master_fd, slave_fd
try:
return os.openpty()
except (AttributeError, OSError):
pass
master_fd, slave_name = _open_terminal()
slave_fd = slave_open(slave_name)
return master_fd, slave_fd
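A minimal sketch of driving openpty() by hand on a Unix box; note that the slave's line discipline may turn '\n' into '\r\n', and error handling is omitted:

    import os, pty
    master_fd, slave_fd = pty.openpty()
    os.write(slave_fd, 'hello\n')
    print os.read(master_fd, 1024)    # what appeared on the master side
    os.close(slave_fd)
    os.close(master_fd)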
def master_open():
"""master_open() -> (master_fd, slave_name)
Open a pty master and return the fd, and the filename of the slave end.
Deprecated, use openpty() instead."""
"""master_open() -> (master_fd, slave_name)
Open a pty master and return the fd, and the filename of the slave end.
Deprecated, use openpty() instead."""
try:
master_fd, slave_fd = os.openpty()
except (AttributeError, OSError):
pass
else:
slave_name = os.ttyname(slave_fd)
os.close(slave_fd)
return master_fd, slave_name
try:
master_fd, slave_fd = os.openpty()
except (AttributeError, OSError):
pass
else:
slave_name = os.ttyname(slave_fd)
os.close(slave_fd)
return master_fd, slave_name
return _open_terminal()
return _open_terminal()
def _open_terminal():
"""Open pty master and return (master_fd, tty_name).
SGI and generic BSD version, for when openpty() fails."""
try:
import sgi
except ImportError:
pass
else:
try:
tty_name, master_fd = sgi._getpty(FCNTL.O_RDWR, 0666, 0)
except IOError, msg:
raise os.error, msg
return master_fd, tty_name
for x in 'pqrstuvwxyzPQRST':
for y in '0123456789abcdef':
pty_name = '/dev/pty' + x + y
try:
fd = os.open(pty_name, FCNTL.O_RDWR)
except os.error:
continue
return (fd, '/dev/tty' + x + y)
raise os.error, 'out of pty devices'
"""Open pty master and return (master_fd, tty_name).
SGI and generic BSD version, for when openpty() fails."""
try:
import sgi
except ImportError:
pass
else:
try:
tty_name, master_fd = sgi._getpty(FCNTL.O_RDWR, 0666, 0)
except IOError, msg:
raise os.error, msg
return master_fd, tty_name
for x in 'pqrstuvwxyzPQRST':
for y in '0123456789abcdef':
pty_name = '/dev/pty' + x + y
try:
fd = os.open(pty_name, FCNTL.O_RDWR)
except os.error:
continue
return (fd, '/dev/tty' + x + y)
raise os.error, 'out of pty devices'
def slave_open(tty_name):
"""slave_open(tty_name) -> slave_fd
Open the pty slave and acquire the controlling terminal, returning
the opened file descriptor.
Deprecated, use openpty() instead."""
"""slave_open(tty_name) -> slave_fd
Open the pty slave and acquire the controlling terminal, returning
the opened file descriptor.
Deprecated, use openpty() instead."""
return os.open(tty_name, FCNTL.O_RDWR)
return os.open(tty_name, FCNTL.O_RDWR)
def fork():
"""fork() -> (pid, master_fd)
Fork and make the child a session leader with a controlling terminal."""
"""fork() -> (pid, master_fd)
Fork and make the child a session leader with a controlling terminal."""
try:
pid, fd = os.forkpty()
except (AttributeError, OSError):
pass
else:
if pid == CHILD:
try:
os.setsid()
except OSError:
# os.forkpty() already set us session leader
pass
return pid, fd
try:
pid, fd = os.forkpty()
except (AttributeError, OSError):
pass
else:
if pid == CHILD:
try:
os.setsid()
except OSError:
# os.forkpty() already set us session leader
pass
return pid, fd
master_fd, slave_fd = openpty()
pid = os.fork()
if pid == CHILD:
# Establish a new session.
os.setsid()
os.close(master_fd)
master_fd, slave_fd = openpty()
pid = os.fork()
if pid == CHILD:
# Establish a new session.
os.setsid()
os.close(master_fd)
# Slave becomes stdin/stdout/stderr of child.
os.dup2(slave_fd, STDIN_FILENO)
os.dup2(slave_fd, STDOUT_FILENO)
os.dup2(slave_fd, STDERR_FILENO)
if (slave_fd > STDERR_FILENO):
os.close (slave_fd)
# Slave becomes stdin/stdout/stderr of child.
os.dup2(slave_fd, STDIN_FILENO)
os.dup2(slave_fd, STDOUT_FILENO)
os.dup2(slave_fd, STDERR_FILENO)
if (slave_fd > STDERR_FILENO):
os.close (slave_fd)
# Parent and child process.
return pid, master_fd
# Parent and child process.
return pid, master_fd
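The usual pattern built on fork() looks roughly like this (Unix only; the command run in the child is just an example):

    import os, pty
    pid, master_fd = pty.fork()
    if pid == 0:                       # child: its stdio is the slave end of the pty
        os.execlp('date', 'date')
    else:                              # parent: read the child's output from the master
        print os.read(master_fd, 1024)
        os.waitpid(pid, 0)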
def _writen(fd, data):
"""Write all the data to a descriptor."""
while data != '':
n = os.write(fd, data)
data = data[n:]
"""Write all the data to a descriptor."""
while data != '':
n = os.write(fd, data)
data = data[n:]
def _read(fd):
"""Default read function."""
return os.read(fd, 1024)
"""Default read function."""
return os.read(fd, 1024)
def _copy(master_fd, master_read=_read, stdin_read=_read):
"""Parent copy loop.
Copies
pty master -> standard output (master_read)
standard input -> pty master (stdin_read)"""
while 1:
rfds, wfds, xfds = select(
[master_fd, STDIN_FILENO], [], [])
if master_fd in rfds:
data = master_read(master_fd)
os.write(STDOUT_FILENO, data)
if STDIN_FILENO in rfds:
data = stdin_read(STDIN_FILENO)
_writen(master_fd, data)
"""Parent copy loop.
Copies
pty master -> standard output (master_read)
standard input -> pty master (stdin_read)"""
while 1:
rfds, wfds, xfds = select(
[master_fd, STDIN_FILENO], [], [])
if master_fd in rfds:
data = master_read(master_fd)
os.write(STDOUT_FILENO, data)
if STDIN_FILENO in rfds:
data = stdin_read(STDIN_FILENO)
_writen(master_fd, data)
def spawn(argv, master_read=_read, stdin_read=_read):
"""Create a spawned process."""
if type(argv) == type(''):
argv = (argv,)
pid, master_fd = fork()
if pid == CHILD:
apply(os.execlp, (argv[0],) + argv)
mode = tty.tcgetattr(STDIN_FILENO)
tty.setraw(STDIN_FILENO)
try:
_copy(master_fd, master_read, stdin_read)
except:
tty.tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode)
"""Create a spawned process."""
if type(argv) == type(''):
argv = (argv,)
pid, master_fd = fork()
if pid == CHILD:
apply(os.execlp, (argv[0],) + argv)
mode = tty.tcgetattr(STDIN_FILENO)
tty.setraw(STDIN_FILENO)
try:
_copy(master_fd, master_read, stdin_read)
except:
tty.tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode)
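spawn() ties the pieces together: it forks via fork() above, puts the calling terminal into raw mode and copies data both ways until the child exits. A one-line sketch, to be run from a real terminal (the command is illustrative):

    import pty
    pty.spawn('/bin/ls')    # or pty.spawn(['/bin/sh', '-i']) for an interactive shell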

View File

@ -4,7 +4,7 @@ Parse enough of a Python file to recognize class and method
definitions and to find out the superclasses of a class.
The interface consists of a single function:
readmodule(module, path)
readmodule(module, path)
module is the name of a Python module, path is an optional list of
directories where the module is to be searched. If present, path is
prepended to the system search path sys.path.
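For instance, a caller might dump the classes found in a module like this (a sketch only; the module name is arbitrary):

    import pyclbr
    classes = pyclbr.readmodule('shlex')
    for name, cls in classes.items():
        print name, 'defined in', cls.file, 'at line', cls.lineno
        for meth, lineno in cls.methods.items():
            print '   ', meth, 'at line', lineno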
@ -15,11 +15,11 @@ are class instances of the class Class defined here.
A class is described by the class Class in this module. Instances
of this class have the following instance variables:
name -- the name of the class
super -- a list of super classes (Class instances)
methods -- a dictionary of methods
file -- the file in which the class was defined
lineno -- the line in the file on which the class statement occurred
name -- the name of the class
super -- a list of super classes (Class instances)
methods -- a dictionary of methods
file -- the file in which the class was defined
lineno -- the line in the file on which the class statement occurred
The dictionary of methods uses the method names as keys and the line
numbers on which the method was defined as values.
If the name of a super class is not recognized, the corresponding
@ -64,52 +64,52 @@ TABWIDTH = 8
_getnext = re.compile(r"""
(?P<String>
\""" [^"\\]* (?:
(?: \\. | "(?!"") )
[^"\\]*
)*
(?: \\. | "(?!"") )
[^"\\]*
)*
\"""
| ''' [^'\\]* (?:
(?: \\. | '(?!'') )
[^'\\]*
)*
'''
(?: \\. | '(?!'') )
[^'\\]*
)*
'''
)
| (?P<Method>
^
(?P<MethodIndent> [ \t]* )
def [ \t]+
(?P<MethodName> [a-zA-Z_] \w* )
[ \t]* \(
^
(?P<MethodIndent> [ \t]* )
def [ \t]+
(?P<MethodName> [a-zA-Z_] \w* )
[ \t]* \(
)
| (?P<Class>
^
(?P<ClassIndent> [ \t]* )
class [ \t]+
(?P<ClassName> [a-zA-Z_] \w* )
[ \t]*
(?P<ClassSupers> \( [^)\n]* \) )?
[ \t]* :
^
(?P<ClassIndent> [ \t]* )
class [ \t]+
(?P<ClassName> [a-zA-Z_] \w* )
[ \t]*
(?P<ClassSupers> \( [^)\n]* \) )?
[ \t]* :
)
| (?P<Import>
^ import [ \t]+
(?P<ImportList> [^#;\n]+ )
^ import [ \t]+
(?P<ImportList> [^#;\n]+ )
)
| (?P<ImportFrom>
^ from [ \t]+
(?P<ImportFromPath>
[a-zA-Z_] \w*
(?:
[ \t]* \. [ \t]* [a-zA-Z_] \w*
)*
)
[ \t]+
import [ \t]+
(?P<ImportFromList> [^#;\n]+ )
^ from [ \t]+
(?P<ImportFromPath>
[a-zA-Z_] \w*
(?:
[ \t]* \. [ \t]* [a-zA-Z_] \w*
)*
)
[ \t]+
import [ \t]+
(?P<ImportFromList> [^#;\n]+ )
)
""", re.VERBOSE | re.DOTALL | re.MULTILINE).search
@ -117,220 +117,220 @@ _modules = {} # cache of modules we've seen
# each Python class is represented by an instance of this class
class Class:
'''Class to represent a Python class.'''
def __init__(self, module, name, super, file, lineno):
self.module = module
self.name = name
if super is None:
super = []
self.super = super
self.methods = {}
self.file = file
self.lineno = lineno
'''Class to represent a Python class.'''
def __init__(self, module, name, super, file, lineno):
self.module = module
self.name = name
if super is None:
super = []
self.super = super
self.methods = {}
self.file = file
self.lineno = lineno
def _addmethod(self, name, lineno):
self.methods[name] = lineno
def _addmethod(self, name, lineno):
self.methods[name] = lineno
class Function(Class):
'''Class to represent a top-level Python function'''
def __init__(self, module, name, file, lineno):
Class.__init__(self, module, name, None, file, lineno)
def _addmethod(self, name, lineno):
assert 0, "Function._addmethod() shouldn't be called"
'''Class to represent a top-level Python function'''
def __init__(self, module, name, file, lineno):
Class.__init__(self, module, name, None, file, lineno)
def _addmethod(self, name, lineno):
assert 0, "Function._addmethod() shouldn't be called"
def readmodule(module, path=[], inpackage=0):
'''Backwards compatible interface.
'''Backwards compatible interface.
Like readmodule_ex() but strips Function objects from the
resulting dictionary.'''
Like readmodule_ex() but strips Function objects from the
resulting dictionary.'''
dict = readmodule_ex(module, path, inpackage)
res = {}
for key, value in dict.items():
if not isinstance(value, Function):
res[key] = value
return res
dict = readmodule_ex(module, path, inpackage)
res = {}
for key, value in dict.items():
if not isinstance(value, Function):
res[key] = value
return res
def readmodule_ex(module, path=[], inpackage=0):
'''Read a module file and return a dictionary of classes.
'''Read a module file and return a dictionary of classes.
Search for MODULE in PATH and sys.path, read and parse the
module and return a dictionary with one entry for each class
found in the module.'''
Search for MODULE in PATH and sys.path, read and parse the
module and return a dictionary with one entry for each class
found in the module.'''
dict = {}
dict = {}
i = string.rfind(module, '.')
if i >= 0:
# Dotted module name
package = string.strip(module[:i])
submodule = string.strip(module[i+1:])
parent = readmodule(package, path, inpackage)
child = readmodule(submodule, parent['__path__'], 1)
return child
i = string.rfind(module, '.')
if i >= 0:
# Dotted module name
package = string.strip(module[:i])
submodule = string.strip(module[i+1:])
parent = readmodule(package, path, inpackage)
child = readmodule(submodule, parent['__path__'], 1)
return child
if _modules.has_key(module):
# we've seen this module before...
return _modules[module]
if module in sys.builtin_module_names:
# this is a built-in module
_modules[module] = dict
return dict
if _modules.has_key(module):
# we've seen this module before...
return _modules[module]
if module in sys.builtin_module_names:
# this is a built-in module
_modules[module] = dict
return dict
# search the path for the module
f = None
if inpackage:
try:
f, file, (suff, mode, type) = \
imp.find_module(module, path)
except ImportError:
f = None
if f is None:
fullpath = list(path) + sys.path
f, file, (suff, mode, type) = imp.find_module(module, fullpath)
if type == imp.PKG_DIRECTORY:
dict['__path__'] = [file]
_modules[module] = dict
path = [file] + path
f, file, (suff, mode, type) = \
imp.find_module('__init__', [file])
if type != imp.PY_SOURCE:
# not Python source, can't do anything with this module
f.close()
_modules[module] = dict
return dict
# search the path for the module
f = None
if inpackage:
try:
f, file, (suff, mode, type) = \
imp.find_module(module, path)
except ImportError:
f = None
if f is None:
fullpath = list(path) + sys.path
f, file, (suff, mode, type) = imp.find_module(module, fullpath)
if type == imp.PKG_DIRECTORY:
dict['__path__'] = [file]
_modules[module] = dict
path = [file] + path
f, file, (suff, mode, type) = \
imp.find_module('__init__', [file])
if type != imp.PY_SOURCE:
# not Python source, can't do anything with this module
f.close()
_modules[module] = dict
return dict
_modules[module] = dict
imports = []
classstack = [] # stack of (class, indent) pairs
src = f.read()
f.close()
_modules[module] = dict
imports = []
classstack = [] # stack of (class, indent) pairs
src = f.read()
f.close()
# To avoid having to stop the regexp at each newline, instead
# when we need a line number we simply string.count the number of
# newlines in the string since the last time we did this; i.e.,
# lineno = lineno + \
# string.count(src, '\n', last_lineno_pos, here)
# last_lineno_pos = here
countnl = string.count
lineno, last_lineno_pos = 1, 0
i = 0
while 1:
m = _getnext(src, i)
if not m:
break
start, i = m.span()
# To avoid having to stop the regexp at each newline, instead
# when we need a line number we simply string.count the number of
# newlines in the string since the last time we did this; i.e.,
# lineno = lineno + \
# string.count(src, '\n', last_lineno_pos, here)
# last_lineno_pos = here
countnl = string.count
lineno, last_lineno_pos = 1, 0
i = 0
while 1:
m = _getnext(src, i)
if not m:
break
start, i = m.span()
if m.start("Method") >= 0:
# found a method definition or function
thisindent = _indent(m.group("MethodIndent"))
meth_name = m.group("MethodName")
lineno = lineno + \
countnl(src, '\n',
last_lineno_pos, start)
last_lineno_pos = start
# close all classes indented at least as much
while classstack and \
classstack[-1][1] >= thisindent:
del classstack[-1]
if classstack:
# it's a class method
cur_class = classstack[-1][0]
cur_class._addmethod(meth_name, lineno)
else:
# it's a function
f = Function(module, meth_name,
file, lineno)
dict[meth_name] = f
if m.start("Method") >= 0:
# found a method definition or function
thisindent = _indent(m.group("MethodIndent"))
meth_name = m.group("MethodName")
lineno = lineno + \
countnl(src, '\n',
last_lineno_pos, start)
last_lineno_pos = start
# close all classes indented at least as much
while classstack and \
classstack[-1][1] >= thisindent:
del classstack[-1]
if classstack:
# it's a class method
cur_class = classstack[-1][0]
cur_class._addmethod(meth_name, lineno)
else:
# it's a function
f = Function(module, meth_name,
file, lineno)
dict[meth_name] = f
elif m.start("String") >= 0:
pass
elif m.start("String") >= 0:
pass
elif m.start("Class") >= 0:
# we found a class definition
thisindent = _indent(m.group("ClassIndent"))
# close all classes indented at least as much
while classstack and \
classstack[-1][1] >= thisindent:
del classstack[-1]
lineno = lineno + \
countnl(src, '\n', last_lineno_pos, start)
last_lineno_pos = start
class_name = m.group("ClassName")
inherit = m.group("ClassSupers")
if inherit:
# the class inherits from other classes
inherit = string.strip(inherit[1:-1])
names = []
for n in string.splitfields(inherit, ','):
n = string.strip(n)
if dict.has_key(n):
# we know this super class
n = dict[n]
else:
c = string.splitfields(n, '.')
if len(c) > 1:
# super class
# is of the
# form module.class:
# look in
# module for class
m = c[-2]
c = c[-1]
if _modules.has_key(m):
d = _modules[m]
if d.has_key(c):
n = d[c]
names.append(n)
inherit = names
# remember this class
cur_class = Class(module, class_name, inherit,
file, lineno)
dict[class_name] = cur_class
classstack.append((cur_class, thisindent))
elif m.start("Class") >= 0:
# we found a class definition
thisindent = _indent(m.group("ClassIndent"))
# close all classes indented at least as much
while classstack and \
classstack[-1][1] >= thisindent:
del classstack[-1]
lineno = lineno + \
countnl(src, '\n', last_lineno_pos, start)
last_lineno_pos = start
class_name = m.group("ClassName")
inherit = m.group("ClassSupers")
if inherit:
# the class inherits from other classes
inherit = string.strip(inherit[1:-1])
names = []
for n in string.splitfields(inherit, ','):
n = string.strip(n)
if dict.has_key(n):
# we know this super class
n = dict[n]
else:
c = string.splitfields(n, '.')
if len(c) > 1:
# super class
# is of the
# form module.class:
# look in
# module for class
m = c[-2]
c = c[-1]
if _modules.has_key(m):
d = _modules[m]
if d.has_key(c):
n = d[c]
names.append(n)
inherit = names
# remember this class
cur_class = Class(module, class_name, inherit,
file, lineno)
dict[class_name] = cur_class
classstack.append((cur_class, thisindent))
elif m.start("Import") >= 0:
# import module
for n in string.split(m.group("ImportList"), ','):
n = string.strip(n)
try:
# recursively read the imported module
d = readmodule(n, path, inpackage)
except:
##print 'module', n, 'not found'
pass
elif m.start("Import") >= 0:
# import module
for n in string.split(m.group("ImportList"), ','):
n = string.strip(n)
try:
# recursively read the imported module
d = readmodule(n, path, inpackage)
except:
##print 'module', n, 'not found'
pass
elif m.start("ImportFrom") >= 0:
# from module import stuff
mod = m.group("ImportFromPath")
names = string.split(m.group("ImportFromList"), ',')
try:
# recursively read the imported module
d = readmodule(mod, path, inpackage)
except:
##print 'module', mod, 'not found'
continue
# add any classes that were defined in the
# imported module to our name space if they
# were mentioned in the list
for n in names:
n = string.strip(n)
if d.has_key(n):
dict[n] = d[n]
elif n == '*':
# only add a name if not
# already there (to mimic what
# Python does internally)
# also don't add names that
# start with _
for n in d.keys():
if n[0] != '_' and \
not dict.has_key(n):
dict[n] = d[n]
else:
assert 0, "regexp _getnext found something unexpected"
elif m.start("ImportFrom") >= 0:
# from module import stuff
mod = m.group("ImportFromPath")
names = string.split(m.group("ImportFromList"), ',')
try:
# recursively read the imported module
d = readmodule(mod, path, inpackage)
except:
##print 'module', mod, 'not found'
continue
# add any classes that were defined in the
# imported module to our name space if they
# were mentioned in the list
for n in names:
n = string.strip(n)
if d.has_key(n):
dict[n] = d[n]
elif n == '*':
# only add a name if not
# already there (to mimic what
# Python does internally)
# also don't add names that
# start with _
for n in d.keys():
if n[0] != '_' and \
not dict.has_key(n):
dict[n] = d[n]
else:
assert 0, "regexp _getnext found something unexpected"
return dict
return dict
def _indent(ws, _expandtabs=string.expandtabs):
return len(_expandtabs(ws, TABWIDTH))
return len(_expandtabs(ws, TABWIDTH))

View File

@ -26,7 +26,7 @@ def encode(input, output, quotetabs):
'input' and 'output' are files with readline() and write() methods.
The 'quotetabs' flag indicates whether tabs should be quoted.
"""
"""
while 1:
line = input.readline()
if not line: