# Copyright 2001-2010 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""
Logging package for Python. Based on PEP 282 and comments thereto in
comp.lang.python, and influenced by Apache's log4j system.

Copyright (C) 2001-2010 Vinay Sajip. All Rights Reserved.

To use, simply 'import logging' and log away!
"""
import sys, os, time, cStringIO, traceback, warnings, weakref

__all__ = ['BASIC_FORMAT', 'BufferingFormatter', 'CRITICAL', 'DEBUG', 'ERROR',
           'FATAL', 'FileHandler', 'Filter', 'Formatter', 'Handler', 'INFO',
           'LogRecord', 'Logger', 'LoggerAdapter', 'NOTSET', 'NullHandler',
           'StreamHandler', 'WARN', 'WARNING', 'addLevelName', 'basicConfig',
           'captureWarnings', 'critical', 'debug', 'disable', 'error',
           'exception', 'fatal', 'getLevelName', 'getLogger', 'getLoggerClass',
           'info', 'log', 'makeLogRecord', 'setLoggerClass', 'warn', 'warning']

try:
    import codecs
except ImportError:
    codecs = None

try:
    import thread
    import threading
except ImportError:
    thread = None

__author__  = "Vinay Sajip <vinay_sajip@red-dove.com>"
__status__  = "production"
__version__ = "0.5.1.2"
__date__    = "07 February 2010"

#---------------------------------------------------------------------------
#   Miscellaneous module data
#---------------------------------------------------------------------------
try:
    unicode
    _unicode = True
except NameError:
    _unicode = False
#
# _srcfile is used when walking the stack to check when we've got the first
# caller stack frame.
#
if hasattr(sys, 'frozen'): #support for py2exe
    _srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:])
elif __file__[-4:].lower() in ['.pyc', '.pyo']:
    _srcfile = __file__[:-4] + '.py'
else:
    _srcfile = __file__
_srcfile = os.path.normcase(_srcfile)

# next bit filched from 1.5.2's inspect.py
def currentframe():
    """Return the frame object for the caller's stack frame."""
    try:
        raise Exception
    except:
        return sys.exc_info()[2].tb_frame.f_back

if hasattr(sys, '_getframe'): currentframe = lambda: sys._getframe(3)
# done filching

# _srcfile is only used in conjunction with sys._getframe().
# To provide compatibility with older versions of Python, set _srcfile
# to None if _getframe() is not available; this value will prevent
# findCaller() from being called.
#if not hasattr(sys, "_getframe"):
#    _srcfile = None

#
#_startTime is used as the base when calculating the relative time of events
#
_startTime = time.time()

#
#raiseExceptions is used to see if exceptions during handling should be
#propagated
#
raiseExceptions = 1
#
# If you don't want threading information in the log, set this to zero
#
logThreads = 1

#
# If you don't want multiprocessing information in the log, set this to zero
#
logMultiprocessing = 1

#
# If you don't want process information in the log, set this to zero
#
logProcesses = 1

#---------------------------------------------------------------------------
#   Level related stuff
#---------------------------------------------------------------------------
#
# Default levels and level names; these can be replaced with any positive set
# of values having corresponding names. There is a pseudo-level, NOTSET, which
# is only really there as a lower limit for user-defined levels. Handlers and
# loggers are initialized with NOTSET so that they will log all messages, even
# at user-defined levels.
#

CRITICAL = 50
FATAL = CRITICAL
ERROR = 40
WARNING = 30
WARN = WARNING
INFO = 20
DEBUG = 10
NOTSET = 0

_levelNames = {
    CRITICAL : 'CRITICAL',
    ERROR : 'ERROR',
    WARNING : 'WARNING',
    INFO : 'INFO',
    DEBUG : 'DEBUG',
    NOTSET : 'NOTSET',
    'CRITICAL' : CRITICAL,
    'ERROR' : ERROR,
    'WARN' : WARNING,
    'WARNING' : WARNING,
    'INFO' : INFO,
    'DEBUG' : DEBUG,
    'NOTSET' : NOTSET,
}
def getLevelName(level):
    """
    Return the textual representation of logging level 'level'.

    If the level is one of the predefined levels (CRITICAL, ERROR, WARNING,
    INFO, DEBUG) then you get the corresponding string. If you have
    associated levels with names using addLevelName then the name you have
    associated with 'level' is returned.

    If a numeric value corresponding to one of the defined levels is passed
    in, the corresponding string representation is returned.

    Otherwise, the string "Level %s" % level is returned.
    """
    return _levelNames.get(level, ("Level %s" % level))

def addLevelName(level, levelName):
    """
    Associate 'levelName' with 'level'.

    This is used when converting levels to text during message formatting.
    """
    _acquireLock()
    try:    #unlikely to cause an exception, but you never know...
        _levelNames[level] = levelName
        _levelNames[levelName] = level
    finally:
        _releaseLock()
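# Illustrative sketch (editor's comment, not part of the original module):
# how addLevelName() and getLevelName() round-trip a custom level, using the
# definitions above. The number 25 and the name 'NOTICE' are arbitrary.
#
#   addLevelName(25, 'NOTICE')
#   getLevelName(25)        -> 'NOTICE'
#   getLevelName('NOTICE')  -> 25
#   getLevelName(15)        -> 'Level 15'   (no name registered for 15)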

def _checkLevel(level):
    if isinstance(level, int):
        rv = level
    elif str(level) == level:
        if level not in _levelNames:
            raise ValueError("Unknown level: %r" % level)
        rv = _levelNames[level]
    else:
        raise TypeError("Level not an integer or a valid string: %r" % level)
    return rv
#---------------------------------------------------------------------------
#   Thread-related stuff
#---------------------------------------------------------------------------

#
#_lock is used to serialize access to shared data structures in this module.
#This needs to be an RLock because fileConfig() creates and configures
#Handlers, and so might arbitrary user threads. Since Handler code updates the
#shared dictionary _handlers, it needs to acquire the lock. But if configuring,
#the lock would already have been acquired - so we need an RLock.
#The same argument applies to Loggers and Manager.loggerDict.
#
if thread:
    _lock = threading.RLock()
else:
    _lock = None

def _acquireLock():
    """
    Acquire the module-level lock for serializing access to shared data.

    This should be released with _releaseLock().
    """
    if _lock:
        _lock.acquire()

def _releaseLock():
    """
    Release the module-level lock acquired by calling _acquireLock().
    """
    if _lock:
        _lock.release()
#---------------------------------------------------------------------------
#   The logging record
#---------------------------------------------------------------------------

class LogRecord(object):
    """
    A LogRecord instance represents an event being logged.

    LogRecord instances are created every time something is logged. They
    contain all the information pertinent to the event being logged. The
    main information passed in is in msg and args, which are combined
    using str(msg) % args to create the message field of the record. The
    record also includes information such as when the record was created,
    the source line where the logging call was made, and any exception
    information to be logged.
    """
    def __init__(self, name, level, pathname, lineno,
                 msg, args, exc_info, func=None):
        """
        Initialize a logging record with interesting information.
        """
        ct = time.time()
        self.name = name
        self.msg = msg
        #
        # The following statement allows passing of a dictionary as a sole
        # argument, so that you can do something like
        #       logging.debug("a %(a)d b %(b)s", {'a':1, 'b':2})
        # Suggested by Stefan Behnel.
        # Note that without the test for args[0], we get a problem because
        # during formatting, we test to see if the arg is present using
        # 'if self.args:'. If the event being logged is e.g. 'Value is %d'
        # and if the passed arg fails 'if self.args:' then no formatting
        # is done. For example, logger.warn('Value is %d', 0) would log
        # 'Value is %d' instead of 'Value is 0'.
        # For the use case of passing a dictionary, this should not be a
        # problem.
        if args and len(args) == 1 and isinstance(args[0], dict) and args[0]:
            args = args[0]
        self.args = args
        self.levelname = getLevelName(level)
        self.levelno = level
        self.pathname = pathname
        try:
            self.filename = os.path.basename(pathname)
            self.module = os.path.splitext(self.filename)[0]
        except (TypeError, ValueError, AttributeError):
            self.filename = pathname
            self.module = "Unknown module"
        self.exc_info = exc_info
        self.exc_text = None      # used to cache the traceback text
        self.lineno = lineno
        self.funcName = func
        self.created = ct
        self.msecs = (ct - long(ct)) * 1000
        self.relativeCreated = (self.created - _startTime) * 1000
        if logThreads and thread:
            self.thread = thread.get_ident()
            self.threadName = threading.current_thread().name
        else:
            self.thread = None
            self.threadName = None
        if not logMultiprocessing:
            self.processName = None
        else:
            self.processName = 'MainProcess'
            mp = sys.modules.get('multiprocessing')
            if mp is not None:
                # Errors may occur if multiprocessing has not finished loading
                # yet - e.g. if a custom import hook causes third-party code
                # to run when multiprocessing calls import. See issue 8200
                # for an example
                try:
                    self.processName = mp.current_process().name
                except StandardError:
                    pass
        if logProcesses and hasattr(os, 'getpid'):
            self.process = os.getpid()
        else:
            self.process = None

    def __str__(self):
        return '<LogRecord: %s, %s, %s, %s, "%s">'%(self.name, self.levelno,
            self.pathname, self.lineno, self.msg)

    def getMessage(self):
        """
        Return the message for this LogRecord.

        Return the message for this LogRecord after merging any user-supplied
        arguments with the message.
        """
        if not _unicode: #if no unicode support...
            msg = str(self.msg)
        else:
            msg = self.msg
            if not isinstance(msg, basestring):
                try:
                    msg = str(self.msg)
                except UnicodeError:
                    msg = self.msg      #Defer encoding till later
        if self.args:
            msg = msg % self.args
        return msg
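# Illustrative sketch (editor's comment, not part of the original module):
# the dictionary-as-sole-argument behaviour described in __init__ above, and
# how getMessage() merges args into the message. The record fields shown are
# arbitrary example values.
#
#   rec = LogRecord('example', INFO, __file__, 1,
#                   "a is %(a)d, b is %(b)s", ({'a': 1, 'b': 'two'},), None)
#   rec.getMessage()   -> 'a is 1, b is two'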

def makeLogRecord(dict):
    """
    Make a LogRecord whose attributes are defined by the specified dictionary.
    This function is useful for converting a logging event received over
    a socket connection (which is sent as a dictionary) into a LogRecord
    instance.
    """
    rv = LogRecord(None, None, "", 0, "", (), None, None)
    rv.__dict__.update(dict)
    return rv
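# Illustrative sketch (editor's comment, not part of the original module):
# the socket-receiver use case mentioned in the docstring above. The
# attribute dictionary is a hypothetical, minimal example.
#
#   d = {'name': 'example', 'levelno': INFO, 'levelname': 'INFO',
#        'msg': 'payload received', 'args': None, 'exc_info': None}
#   record = makeLogRecord(d)
#   record.getMessage()   -> 'payload received'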

#---------------------------------------------------------------------------
#   Formatter classes and functions
#---------------------------------------------------------------------------

class Formatter(object):
    """
    Formatter instances are used to convert a LogRecord to text.

    Formatters need to know how a LogRecord is constructed. They are
    responsible for converting a LogRecord to (usually) a string which can
    be interpreted by either a human or an external system. The base Formatter
    allows a formatting string to be specified. If none is supplied, the
    default value of "%(message)s" is used.

    The Formatter can be initialized with a format string which makes use of
    knowledge of the LogRecord attributes - e.g. the default value mentioned
    above makes use of the fact that the user's message and arguments are pre-
    formatted into a LogRecord's message attribute. Currently, the useful
    attributes in a LogRecord are described by:

    %(name)s            Name of the logger (logging channel)
    %(levelno)s         Numeric logging level for the message (DEBUG, INFO,
                        WARNING, ERROR, CRITICAL)
    %(levelname)s       Text logging level for the message ("DEBUG", "INFO",
                        "WARNING", "ERROR", "CRITICAL")
    %(pathname)s        Full pathname of the source file where the logging
                        call was issued (if available)
    %(filename)s        Filename portion of pathname
    %(module)s          Module (name portion of filename)
    %(lineno)d          Source line number where the logging call was issued
                        (if available)
    %(funcName)s        Function name
    %(created)f         Time when the LogRecord was created (time.time()
                        return value)
    %(asctime)s         Textual time when the LogRecord was created
    %(msecs)d           Millisecond portion of the creation time
    %(relativeCreated)d Time in milliseconds when the LogRecord was created,
                        relative to the time the logging module was loaded
                        (typically at application startup time)
    %(thread)d          Thread ID (if available)
    %(threadName)s      Thread name (if available)
    %(process)d         Process ID (if available)
    %(message)s         The result of record.getMessage(), computed just as
                        the record is emitted
    """

    converter = time.localtime

    def __init__(self, fmt=None, datefmt=None):
        """
        Initialize the formatter with specified format strings.

        Initialize the formatter either with the specified format string, or a
        default as described above. Allow for specialized date formatting with
        the optional datefmt argument (if omitted, you get the ISO8601 format).
        """
        if fmt:
            self._fmt = fmt
        else:
            self._fmt = "%(message)s"
        self.datefmt = datefmt

    def formatTime(self, record, datefmt=None):
        """
        Return the creation time of the specified LogRecord as formatted text.

        This method should be called from format() by a formatter which
        wants to make use of a formatted time. This method can be overridden
        in formatters to provide for any specific requirement, but the
        basic behaviour is as follows: if datefmt (a string) is specified,
        it is used with time.strftime() to format the creation time of the
        record. Otherwise, the ISO8601 format is used. The resulting
        string is returned. This function uses a user-configurable function
        to convert the creation time to a tuple. By default, time.localtime()
        is used; to change this for a particular formatter instance, set the
        'converter' attribute to a function with the same signature as
        time.localtime() or time.gmtime(). To change it for all formatters,
        for example if you want all logging times to be shown in GMT,
        set the 'converter' attribute in the Formatter class.
        """
        ct = self.converter(record.created)
        if datefmt:
            s = time.strftime(datefmt, ct)
        else:
            t = time.strftime("%Y-%m-%d %H:%M:%S", ct)
            s = "%s,%03d" % (t, record.msecs)
        return s

    def formatException(self, ei):
        """
        Format and return the specified exception information as a string.

        This default implementation just uses
        traceback.print_exception()
        """
        sio = cStringIO.StringIO()
        traceback.print_exception(ei[0], ei[1], ei[2], None, sio)
        s = sio.getvalue()
        sio.close()
        if s[-1:] == "\n":
            s = s[:-1]
        return s

    def usesTime(self):
        """
        Check if the format uses the creation time of the record.
        """
        return self._fmt.find("%(asctime)") >= 0

    def format(self, record):
        """
        Format the specified record as text.

        The record's attribute dictionary is used as the operand to a
        string formatting operation which yields the returned string.
        Before formatting the dictionary, a couple of preparatory steps
        are carried out. The message attribute of the record is computed
        using LogRecord.getMessage(). If the formatting string uses the
        time (as determined by a call to usesTime()), formatTime() is
        called to format the event time. If there is exception information,
        it is formatted using formatException() and appended to the message.
        """
        record.message = record.getMessage()
        if self.usesTime():
            record.asctime = self.formatTime(record, self.datefmt)
        s = self._fmt % record.__dict__
        if record.exc_info:
            # Cache the traceback text to avoid converting it multiple times
            # (it's constant anyway)
            if not record.exc_text:
                record.exc_text = self.formatException(record.exc_info)
        if record.exc_text:
            if s[-1:] != "\n":
                s = s + "\n"
            try:
                s = s + record.exc_text
            except UnicodeError:
                # Sometimes filenames have non-ASCII chars, which can lead
                # to errors when s is Unicode and record.exc_text is str
                # See issue 8924
                s = s + record.exc_text.decode(sys.getfilesystemencoding())
        return s
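# Illustrative sketch (editor's comment, not part of the original module): a
# Formatter built from the attributes documented in the class docstring,
# attached to a StreamHandler (defined later in this module). The format and
# date strings are arbitrary examples.
#
#   fmt = Formatter("%(asctime)s %(levelname)s %(name)s: %(message)s",
#                   datefmt="%Y-%m-%d %H:%M:%S")
#   handler = StreamHandler()
#   handler.setFormatter(fmt)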

#
#   The default formatter to use when no other is specified
#
_defaultFormatter = Formatter()

class BufferingFormatter(object):
    """
    A formatter suitable for formatting a number of records.
    """
    def __init__(self, linefmt=None):
        """
        Optionally specify a formatter which will be used to format each
        individual record.
        """
        if linefmt:
            self.linefmt = linefmt
        else:
            self.linefmt = _defaultFormatter

    def formatHeader(self, records):
        """
        Return the header string for the specified records.
        """
        return ""

    def formatFooter(self, records):
        """
        Return the footer string for the specified records.
        """
        return ""

    def format(self, records):
        """
        Format the specified records and return the result as a string.
        """
        rv = ""
        if len(records) > 0:
            rv = rv + self.formatHeader(records)
            for record in records:
                rv = rv + self.linefmt.format(record)
            rv = rv + self.formatFooter(records)
        return rv

#---------------------------------------------------------------------------
#   Filter classes and functions
#---------------------------------------------------------------------------

class Filter(object):
    """
    Filter instances are used to perform arbitrary filtering of LogRecords.

    Loggers and Handlers can optionally use Filter instances to filter
    records as desired. The base filter class only allows events which are
    below a certain point in the logger hierarchy. For example, a filter
    initialized with "A.B" will allow events logged by loggers "A.B",
    "A.B.C", "A.B.C.D", "A.B.D" etc. but not "A.BB", "B.A.B" etc. If
    initialized with the empty string, all events are passed.
    """
    def __init__(self, name=''):
        """
        Initialize a filter.

        Initialize with the name of the logger which, together with its
        children, will have its events allowed through the filter. If no
        name is specified, allow every event.
        """
        self.name = name
        self.nlen = len(name)

    def filter(self, record):
        """
        Determine if the specified record is to be logged.

        Is the specified record to be logged? Returns 0 for no, nonzero for
        yes. If deemed appropriate, the record may be modified in-place.
        """
        if self.nlen == 0:
            return 1
        elif self.name == record.name:
            return 1
        elif record.name.find(self.name, 0, self.nlen) != 0:
            return 0
        return (record.name[self.nlen] == ".")
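# Illustrative sketch (editor's comment, not part of the original module):
# the hierarchy-based behaviour described in the Filter docstring, using the
# same example names and makeLogRecord() from above.
#
#   f = Filter('A.B')
#   f.filter(makeLogRecord({'name': 'A.B.C'}))   -> true  (child of "A.B")
#   f.filter(makeLogRecord({'name': 'A.BB'}))    -> false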

class Filterer(object):
    """
    A base class for loggers and handlers which allows them to share
    common code.
    """
    def __init__(self):
        """
        Initialize the list of filters to be an empty list.
        """
        self.filters = []

    def addFilter(self, filter):
        """
        Add the specified filter to this handler.
        """
        if not (filter in self.filters):
            self.filters.append(filter)

    def removeFilter(self, filter):
        """
        Remove the specified filter from this handler.
        """
        if filter in self.filters:
            self.filters.remove(filter)

    def filter(self, record):
        """
        Determine if a record is loggable by consulting all the filters.

        The default is to allow the record to be logged; any filter can veto
        this and the record is then dropped. Returns a zero value if a record
        is to be dropped, else non-zero.
        """
        rv = 1
        for f in self.filters:
            if not f.filter(record):
                rv = 0
                break
        return rv

#---------------------------------------------------------------------------
#   Handler classes and functions
#---------------------------------------------------------------------------

_handlers = weakref.WeakValueDictionary()  #map of handler names to handlers
_handlerList = [] # added to allow handlers to be removed in reverse of order initialized

def _removeHandlerRef(wr):
    """
    Remove a handler reference from the internal cleanup list.
    """
    # This function can be called during module teardown, when globals are
    # set to None. If _acquireLock is None, assume this is the case and do
    # nothing.
    if _acquireLock is not None:
        _acquireLock()
        try:
            if wr in _handlerList:
                _handlerList.remove(wr)
        finally:
            _releaseLock()

def _addHandlerRef(handler):
    """
    Add a handler to the internal cleanup list using a weak reference.
    """
    _acquireLock()
    try:
        _handlerList.append(weakref.ref(handler, _removeHandlerRef))
    finally:
        _releaseLock()

class Handler(Filterer):
    """
    Handler instances dispatch logging events to specific destinations.

    The base handler class. Acts as a placeholder which defines the Handler
    interface. Handlers can optionally use Formatter instances to format
    records as desired. By default, no formatter is specified; in this case,
    the 'raw' message as determined by record.message is logged.
    """
    def __init__(self, level=NOTSET):
        """
        Initializes the instance - basically setting the formatter to None
        and the filter list to empty.
        """
        Filterer.__init__(self)
        self._name = None
        self.level = _checkLevel(level)
        self.formatter = None
        # Add the handler to the global _handlerList (for cleanup on shutdown)
        _addHandlerRef(self)
        self.createLock()

    def get_name(self):
        return self._name

    def set_name(self, name):
        _acquireLock()
        try:
            if self._name in _handlers:
                del _handlers[self._name]
            self._name = name
            if name:
                _handlers[name] = self
        finally:
            _releaseLock()

    name = property(get_name, set_name)

    def createLock(self):
        """
        Acquire a thread lock for serializing access to the underlying I/O.
        """
        if thread:
            self.lock = threading.RLock()
        else:
            self.lock = None

    def acquire(self):
        """
        Acquire the I/O thread lock.
        """
        if self.lock:
            self.lock.acquire()

    def release(self):
        """
        Release the I/O thread lock.
        """
        if self.lock:
            self.lock.release()

    def setLevel(self, level):
        """
        Set the logging level of this handler.
        """
        self.level = _checkLevel(level)

    def format(self, record):
        """
        Format the specified record.

        If a formatter is set, use it. Otherwise, use the default formatter
        for the module.
        """
        if self.formatter:
            fmt = self.formatter
        else:
            fmt = _defaultFormatter
        return fmt.format(record)

    def emit(self, record):
        """
        Do whatever it takes to actually log the specified logging record.

        This version is intended to be implemented by subclasses and so
        raises a NotImplementedError.
        """
        raise NotImplementedError('emit must be implemented '
                                  'by Handler subclasses')

    def handle(self, record):
        """
        Conditionally emit the specified logging record.

        Emission depends on filters which may have been added to the handler.
        Wrap the actual emission of the record with acquisition/release of
        the I/O thread lock. Returns whether the filter passed the record for
        emission.
        """
        rv = self.filter(record)
        if rv:
            self.acquire()
            try:
                self.emit(record)
            finally:
                self.release()
        return rv

    def setFormatter(self, fmt):
        """
        Set the formatter for this handler.
        """
        self.formatter = fmt

    def flush(self):
        """
        Ensure all logging output has been flushed.

        This version does nothing and is intended to be implemented by
        subclasses.
        """
        pass

    def close(self):
        """
        Tidy up any resources used by the handler.

        This version removes the handler from an internal map of handlers,
        _handlers, which is used for handler lookup by name. Subclasses
        should ensure that this gets called from overridden close()
        methods.
        """
        #get the module data lock, as we're updating a shared structure.
        _acquireLock()
        try:    #unlikely to raise an exception, but you never know...
            if self._name and self._name in _handlers:
                del _handlers[self._name]
        finally:
            _releaseLock()

    def handleError(self, record):
        """
        Handle errors which occur during an emit() call.

        This method should be called from handlers when an exception is
        encountered during an emit() call. If raiseExceptions is false,
        exceptions get silently ignored. This is what is mostly wanted
        for a logging system - most users will not care about errors in
        the logging system; they are more interested in application errors.
        You could, however, replace this with a custom handler if you wish.
        The record which was being processed is passed in to this method.
        """
        if raiseExceptions:
            ei = sys.exc_info()
            try:
                traceback.print_exception(ei[0], ei[1], ei[2],
                                          None, sys.stderr)
                sys.stderr.write('Logged from file %s, line %s\n' % (
                                 record.filename, record.lineno))
            except IOError:
                pass    # see issue 5971
            finally:
                del ei
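# Illustrative sketch (editor's comment, not part of the original module):
# the smallest useful Handler subclass, overriding emit() as the docstring
# above requires. The class name and the list attribute are arbitrary
# choices for the example.
#
#   class ListHandler(Handler):
#       """Collect formatted records in a list (e.g. for tests)."""
#       def __init__(self, level=NOTSET):
#           Handler.__init__(self, level)
#           self.records = []
#       def emit(self, record):
#           self.records.append(self.format(record))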

class StreamHandler(Handler):
    """
    A handler class which writes logging records, appropriately formatted,
    to a stream. Note that this class does not close the stream, as
    sys.stdout or sys.stderr may be used.
    """

    def __init__(self, stream=None):
        """
        Initialize the handler.

        If stream is not specified, sys.stderr is used.
        """
        Handler.__init__(self)
        if stream is None:
            stream = sys.stderr
        self.stream = stream

    def flush(self):
        """
        Flushes the stream.
        """
        if self.stream and hasattr(self.stream, "flush"):
            self.stream.flush()

    def emit(self, record):
        """
        Emit a record.

        If a formatter is specified, it is used to format the record.
        The record is then written to the stream with a trailing newline. If
        exception information is present, it is formatted using
        traceback.print_exception and appended to the stream. If the stream
        has an 'encoding' attribute, it is used to determine how to do the
        output to the stream.
        """
        try:
            msg = self.format(record)
            stream = self.stream
            fs = "%s\n"
            if not _unicode: #if no unicode support...
                stream.write(fs % msg)
            else:
                try:
                    if (isinstance(msg, unicode) and
                        getattr(stream, 'encoding', None)):
                        ufs = fs.decode(stream.encoding)
                        try:
                            stream.write(ufs % msg)
                        except UnicodeEncodeError:
                            #Printing to terminals sometimes fails. For example,
                            #with an encoding of 'cp1251', the above write will
                            #work if written to a stream opened or wrapped by
                            #the codecs module, but fail when writing to a
                            #terminal even when the codepage is set to cp1251.
                            #An extra encoding step seems to be needed.
                            stream.write((ufs % msg).encode(stream.encoding))
                    else:
                        stream.write(fs % msg)
                except UnicodeError:
                    stream.write(fs % msg.encode("UTF-8"))
            self.flush()
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)

class FileHandler(StreamHandler):
    """
    A handler class which writes formatted logging records to disk files.
    """
    def __init__(self, filename, mode='a', encoding=None, delay=0):
        """
        Open the specified file and use it as the stream for logging.
        """
        #keep the absolute path, otherwise derived classes which use this
        #may come a cropper when the current directory changes
        if codecs is None:
            encoding = None
        self.baseFilename = os.path.abspath(filename)
        self.mode = mode
        self.encoding = encoding
        if delay:
            #We don't open the stream, but we still need to call the
            #Handler constructor to set level, formatter, lock etc.
            Handler.__init__(self)
            self.stream = None
        else:
            StreamHandler.__init__(self, self._open())

    def close(self):
        """
        Closes the stream.
        """
        if self.stream:
            self.flush()
            if hasattr(self.stream, "close"):
                self.stream.close()
            StreamHandler.close(self)
            self.stream = None

    def _open(self):
        """
        Open the current base file with the (original) mode and encoding.
        Return the resulting stream.
        """
        if self.encoding is None:
            stream = open(self.baseFilename, self.mode)
        else:
            stream = codecs.open(self.baseFilename, self.mode, self.encoding)
        return stream

    def emit(self, record):
        """
        Emit a record.

        If the stream was not opened because 'delay' was specified in the
        constructor, open it before calling the superclass's emit.
        """
        if self.stream is None:
            self.stream = self._open()
        StreamHandler.emit(self, record)
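# Illustrative sketch (editor's comment, not part of the original module):
# wiring a delayed, UTF-8 encoded FileHandler to a logger via getLogger()
# (defined later in this module). The file name, logger name and format
# string are arbitrary choices.
#
#   fh = FileHandler('app.log', mode='a', encoding='utf-8', delay=1)
#   fh.setLevel(DEBUG)
#   fh.setFormatter(Formatter("%(asctime)s %(levelname)s %(message)s"))
#   getLogger('example').addHandler(fh)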

#---------------------------------------------------------------------------
#   Manager classes and functions
#---------------------------------------------------------------------------

class PlaceHolder(object):
    """
    PlaceHolder instances are used in the Manager logger hierarchy to take
    the place of nodes for which no loggers have been defined. This class is
    intended for internal use only and not as part of the public API.
    """
    def __init__(self, alogger):
        """
        Initialize with the specified logger being a child of this placeholder.
        """
        #self.loggers = [alogger]
        self.loggerMap = { alogger : None }

    def append(self, alogger):
        """
        Add the specified logger as a child of this placeholder.
        """
        #if alogger not in self.loggers:
        if alogger not in self.loggerMap:
            #self.loggers.append(alogger)
            self.loggerMap[alogger] = None

#
#   Determine which class to use when instantiating loggers.
#
_loggerClass = None

def setLoggerClass(klass):
    """
    Set the class to be used when instantiating a logger. The class should
    define __init__() such that only a name argument is required, and the
    __init__() should call Logger.__init__()
    """
    if klass != Logger:
        if not issubclass(klass, Logger):
            raise TypeError("logger not derived from logging.Logger: "
                            + klass.__name__)
    global _loggerClass
    _loggerClass = klass

def getLoggerClass():
    """
    Return the class to be used when instantiating a logger.
    """

    return _loggerClass
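# Illustrative sketch (editor's comment, not part of the original module): a
# Logger subclass installed via setLoggerClass(), following the constructor
# contract described in its docstring. The class name and the extra method
# are arbitrary choices for the example.
#
#   class AuditLogger(Logger):
#       def __init__(self, name):
#           Logger.__init__(self, name)
#       def audit(self, msg, *args, **kwargs):
#           self.log(WARNING, msg, *args, **kwargs)
#
#   setLoggerClass(AuditLogger)
#   getLogger('example.audit').audit('user %s logged in', 'alice')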

class Manager(object):
    """
    There is [under normal circumstances] just one Manager instance, which
    holds the hierarchy of loggers.
    """
    def __init__(self, rootnode):
        """
        Initialize the manager with the root node of the logger hierarchy.
        """
        self.root = rootnode
        self.disable = 0
        self.emittedNoHandlerWarning = 0
        self.loggerDict = {}
        self.loggerClass = None

    def getLogger(self, name):
        """
        Get a logger with the specified name (channel name), creating it
        if it doesn't yet exist. This name is a dot-separated hierarchical
        name, such as "a", "a.b", "a.b.c" or similar.

        If a PlaceHolder existed for the specified name [i.e. the logger
        didn't exist but a child of it did], replace it with the created
        logger and fix up the parent/child references which pointed to the
        placeholder to now point to the logger.
        """
        rv = None
        _acquireLock()
        try:
            if name in self.loggerDict:
                rv = self.loggerDict[name]
                if isinstance(rv, PlaceHolder):
                    ph = rv
                    rv = (self.loggerClass or _loggerClass)(name)
                    rv.manager = self
                    self.loggerDict[name] = rv
                    self._fixupChildren(ph, rv)
                    self._fixupParents(rv)
            else:
                rv = (self.loggerClass or _loggerClass)(name)
                rv.manager = self
                self.loggerDict[name] = rv
                self._fixupParents(rv)
        finally:
            _releaseLock()
        return rv

    def setLoggerClass(self, klass):
        """
        Set the class to be used when instantiating a logger with this Manager.
        """
        if klass != Logger:
            if not issubclass(klass, Logger):
                raise TypeError("logger not derived from logging.Logger: "
                                + klass.__name__)
        self.loggerClass = klass

    def _fixupParents(self, alogger):
        """
        Ensure that there are either loggers or placeholders all the way
        from the specified logger to the root of the logger hierarchy.
        """
        name = alogger.name
        i = name.rfind(".")
        rv = None
        while (i > 0) and not rv:
            substr = name[:i]
            if substr not in self.loggerDict:
                self.loggerDict[substr] = PlaceHolder(alogger)
            else:
                obj = self.loggerDict[substr]
                if isinstance(obj, Logger):
                    rv = obj
                else:
                    assert isinstance(obj, PlaceHolder)
                    obj.append(alogger)
            i = name.rfind(".", 0, i - 1)
        if not rv:
            rv = self.root
        alogger.parent = rv

    def _fixupChildren(self, ph, alogger):
        """
        Ensure that children of the placeholder ph are connected to the
        specified logger.
        """
        name = alogger.name
        namelen = len(name)
        for c in ph.loggerMap.keys():
            #The if means ... if not c.parent.name.startswith(name)
            if c.parent.name[:namelen] != name:
                alogger.parent = c.parent
                c.parent = alogger
|
|
|
|
|
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
# Logger classes and functions
|
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
|
|
|
|
class Logger(Filterer):
|
|
|
|
"""
|
|
|
|
Instances of the Logger class represent a single logging channel. A
|
|
|
|
"logging channel" indicates an area of an application. Exactly how an
|
|
|
|
"area" is defined is up to the application developer. Since an
|
|
|
|
application can have any number of areas, logging channels are identified
|
|
|
|
by a unique string. Application areas can be nested (e.g. an area
|
|
|
|
of "input processing" might include sub-areas "read CSV files", "read
|
|
|
|
XLS files" and "read Gnumeric files"). To cater for this natural nesting,
|
|
|
|
channel names are organized into a namespace hierarchy where levels are
|
|
|
|
separated by periods, much like the Java or Python package namespace. So
|
|
|
|
in the instance given above, channel names might be "input" for the upper
|
|
|
|
level, and "input.csv", "input.xls" and "input.gnu" for the sub-levels.
|
|
|
|
There is no arbitrary limit to the depth of nesting.
|
|
|
|
"""
|
|
|
|
def __init__(self, name, level=NOTSET):
|
|
|
|
"""
|
|
|
|
Initialize the logger with a name and an optional level.
|
|
|
|
"""
|
|
|
|
Filterer.__init__(self)
|
|
|
|
self.name = name
|
2009-07-13 08:21:05 -03:00
|
|
|
self.level = _checkLevel(level)
|
2002-11-13 12:15:58 -04:00
|
|
|
self.parent = None
|
|
|
|
self.propagate = 1
|
|
|
|
self.handlers = []
|
|
|
|
self.disabled = 0
|
|
|
|
|
|
|
|
def setLevel(self, level):
|
|
|
|
"""
|
|
|
|
Set the logging level of this logger.
|
|
|
|
"""
|
2009-07-13 08:21:05 -03:00
|
|
|
self.level = _checkLevel(level)
|
2002-11-13 12:15:58 -04:00
|
|
|
|
|
|
|
def debug(self, msg, *args, **kwargs):
|
|
|
|
"""
|
|
|
|
Log 'msg % args' with severity 'DEBUG'.
|
|
|
|
|
|
|
|
To pass exception information, use the keyword argument exc_info with
|
|
|
|
a true value, e.g.
|
|
|
|
|
|
|
|
logger.debug("Houston, we have a %s", "thorny problem", exc_info=1)
|
|
|
|
"""
|
2007-08-23 18:55:57 -03:00
|
|
|
if self.isEnabledFor(DEBUG):
|
2008-08-03 21:09:43 -03:00
|
|
|
self._log(DEBUG, msg, args, **kwargs)
|
2002-11-13 12:15:58 -04:00
|
|
|
|
|
|
|
def info(self, msg, *args, **kwargs):
|
|
|
|
"""
|
|
|
|
Log 'msg % args' with severity 'INFO'.
|
|
|
|
|
|
|
|
To pass exception information, use the keyword argument exc_info with
|
|
|
|
a true value, e.g.
|
|
|
|
|
|
|
|
logger.info("Houston, we have a %s", "interesting problem", exc_info=1)
|
|
|
|
"""
|
2007-08-23 18:55:57 -03:00
|
|
|
if self.isEnabledFor(INFO):
|
2008-08-03 21:09:43 -03:00
|
|
|
self._log(INFO, msg, args, **kwargs)
|
2002-11-13 12:15:58 -04:00
|
|
|
|
2003-02-18 10:20:07 -04:00
|
|
|
def warning(self, msg, *args, **kwargs):
|
2002-11-13 12:15:58 -04:00
|
|
|
"""
|
2003-02-18 10:20:07 -04:00
|
|
|
Log 'msg % args' with severity 'WARNING'.
|
2002-11-13 12:15:58 -04:00
|
|
|
|
|
|
|
To pass exception information, use the keyword argument exc_info with
|
|
|
|
a true value, e.g.
|
|
|
|
|
2003-02-18 10:20:07 -04:00
|
|
|
logger.warning("Houston, we have a %s", "bit of a problem", exc_info=1)
|
2002-11-13 12:15:58 -04:00
|
|
|
"""
|
2003-02-18 10:20:07 -04:00
|
|
|
if self.isEnabledFor(WARNING):
|
2008-08-03 21:09:43 -03:00
|
|
|
self._log(WARNING, msg, args, **kwargs)
|
2003-02-18 10:20:07 -04:00
|
|
|
|
|
|
|
warn = warning
|
2002-11-13 12:15:58 -04:00
|
|
|
|
|
|
|
def error(self, msg, *args, **kwargs):
|
|
|
|
"""
|
|
|
|
Log 'msg % args' with severity 'ERROR'.
|
|
|
|
|
|
|
|
To pass exception information, use the keyword argument exc_info with
|
|
|
|
a true value, e.g.
|
|
|
|
|
|
|
|
logger.error("Houston, we have a %s", "major problem", exc_info=1)
|
|
|
|
"""
|
|
|
|
if self.isEnabledFor(ERROR):
|
2008-08-03 21:09:43 -03:00
|
|
|
self._log(ERROR, msg, args, **kwargs)
|
2002-11-13 12:15:58 -04:00
|
|
|
|
|
|
|
def exception(self, msg, *args):
|
|
|
|
"""
|
|
|
|
Convenience method for logging an ERROR with exception information.
|
|
|
|
"""
|
2009-10-10 17:32:36 -03:00
|
|
|
self.error(msg, exc_info=1, *args)
|
2002-11-13 12:15:58 -04:00
|
|
|
|
|
|
|
def critical(self, msg, *args, **kwargs):
|
|
|
|
"""
|
|
|
|
Log 'msg % args' with severity 'CRITICAL'.
|
|
|
|
|
|
|
|
To pass exception information, use the keyword argument exc_info with
|
|
|
|
a true value, e.g.
|
|
|
|
|
|
|
|
logger.critical("Houston, we have a %s", "major disaster", exc_info=1)
|
|
|
|
"""
|
2007-08-23 18:55:57 -03:00
|
|
|
if self.isEnabledFor(CRITICAL):
|
2008-08-03 21:09:43 -03:00
|
|
|
self._log(CRITICAL, msg, args, **kwargs)
|
2002-11-13 12:15:58 -04:00
|
|
|
|
|
|
|
fatal = critical
|
|
|
|
|
|
|
|
def log(self, level, msg, *args, **kwargs):
|
|
|
|
"""
|
2004-08-04 05:38:08 -03:00
|
|
|
Log 'msg % args' with the integer severity 'level'.
|
2002-11-13 12:15:58 -04:00
|
|
|
|
|
|
|
To pass exception information, use the keyword argument exc_info with
|
|
|
|
a true value, e.g.
|
|
|
|
|
|
|
|
logger.log(level, "We have a %s", "mysterious problem", exc_info=1)
|
|
|
|
"""
|
2009-10-10 17:32:36 -03:00
|
|
|
if not isinstance(level, int):
|
2004-07-03 08:47:26 -03:00
|
|
|
if raiseExceptions:
|
2009-10-10 17:32:36 -03:00
|
|
|
raise TypeError("level must be an integer")
|
2004-07-03 08:47:26 -03:00
|
|
|
else:
|
|
|
|
return
|
2002-11-13 12:15:58 -04:00
|
|
|
if self.isEnabledFor(level):
|
2008-08-03 21:09:43 -03:00
|
|
|
self._log(level, msg, args, **kwargs)
|
2002-11-13 12:15:58 -04:00
|
|
|
|
|
|
|
def findCaller(self):
|
|
|
|
"""
|
|
|
|
Find the stack frame of the caller so that we can note the source
|
2005-02-18 07:53:32 -04:00
|
|
|
file name, line number and function name.
|
2002-11-13 12:15:58 -04:00
|
|
|
"""
|
2009-09-29 04:08:54 -03:00
|
|
|
f = currentframe()
|
|
|
|
#On some versions of IronPython, currentframe() returns None if
|
|
|
|
#IronPython isn't run with -X:Frames.
|
|
|
|
if f is not None:
|
|
|
|
f = f.f_back
|
2006-03-15 08:45:07 -04:00
|
|
|
rv = "(unknown file)", 0, "(unknown function)"
|
|
|
|
while hasattr(f, "f_code"):
|
2003-01-23 14:29:29 -04:00
|
|
|
co = f.f_code
|
|
|
|
filename = os.path.normcase(co.co_filename)
|
|
|
|
if filename == _srcfile:
|
|
|
|
f = f.f_back
|
|
|
|
continue
|
2010-10-10 17:36:04 -03:00
|
|
|
rv = (co.co_filename, f.f_lineno, co.co_name)
|
2006-03-15 08:45:07 -04:00
|
|
|
break
|
|
|
|
return rv
|
2002-11-13 12:15:58 -04:00
|
|
|
|
2006-02-09 04:48:36 -04:00
|
|
|
def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=None, extra=None):
|
2002-11-13 12:15:58 -04:00
|
|
|
"""
|
|
|
|
A factory method which can be overridden in subclasses to create
|
|
|
|
specialized LogRecords.
|
|
|
|
"""
|
2006-02-09 04:48:36 -04:00
|
|
|
rv = LogRecord(name, level, fn, lno, msg, args, exc_info, func)
|
2008-01-18 11:55:57 -04:00
|
|
|
if extra is not None:
|
2006-02-09 04:34:14 -04:00
|
|
|
for key in extra:
|
|
|
|
if (key in ["message", "asctime"]) or (key in rv.__dict__):
|
|
|
|
raise KeyError("Attempt to overwrite %r in LogRecord" % key)
|
|
|
|
rv.__dict__[key] = extra[key]
|
|
|
|
return rv
|
2002-11-13 12:15:58 -04:00
|
|
|
|
2006-02-09 04:34:14 -04:00
|
|
|
def _log(self, level, msg, args, exc_info=None, extra=None):
|
2002-11-13 12:15:58 -04:00
|
|
|
"""
|
|
|
|
Low-level logging routine which creates a LogRecord and then calls
|
|
|
|
all the handlers of this logger to handle the record.
|
|
|
|
"""
|
2003-01-23 14:29:29 -04:00
|
|
|
if _srcfile:
|
2009-02-19 08:31:32 -04:00
|
|
|
#IronPython doesn't track Python frames, so findCaller throws an
|
2009-09-29 04:08:54 -03:00
|
|
|
#exception on some versions of IronPython. We trap it here so that
|
|
|
|
#IronPython can use logging.
|
2009-02-19 08:31:32 -04:00
|
|
|
try:
|
|
|
|
fn, lno, func = self.findCaller()
|
|
|
|
except ValueError:
|
|
|
|
fn, lno, func = "(unknown file)", 0, "(unknown function)"
|
2002-11-13 12:15:58 -04:00
|
|
|
else:
|
2005-02-18 07:53:32 -04:00
|
|
|
fn, lno, func = "(unknown file)", 0, "(unknown function)"
|
2002-11-13 12:15:58 -04:00
|
|
|
if exc_info:
|
2009-10-10 17:32:36 -03:00
|
|
|
if not isinstance(exc_info, tuple):
|
2004-02-20 09:18:36 -04:00
|
|
|
exc_info = sys.exc_info()
|
2006-02-09 04:48:36 -04:00
|
|
|
record = self.makeRecord(self.name, level, fn, lno, msg, args, exc_info, func, extra)
|
2002-11-13 12:15:58 -04:00
|
|
|
self.handle(record)
|
|
|
|
|
|
|
|
def handle(self, record):
|
|
|
|
"""
|
|
|
|
Call the handlers for the specified record.
|
|
|
|
|
|
|
|
This method is used for unpickled records received from a socket, as
|
|
|
|
well as those created locally. Logger-level filtering is applied.
|
|
|
|
"""
|
|
|
|
if (not self.disabled) and self.filter(record):
|
|
|
|
self.callHandlers(record)
|
|
|
|
|
|
|
|
def addHandler(self, hdlr):
|
|
|
|
"""
|
|
|
|
Add the specified handler to this logger.
|
|
|
|
"""
|
2010-09-25 14:42:36 -03:00
|
|
|
_acquireLock()
|
|
|
|
try:
|
|
|
|
if not (hdlr in self.handlers):
|
|
|
|
self.handlers.append(hdlr)
|
|
|
|
finally:
|
|
|
|
_releaseLock()
|
2002-11-13 12:15:58 -04:00
|
|
|
|
|
|
|
def removeHandler(self, hdlr):
|
|
|
|
"""
|
|
|
|
Remove the specified handler from this logger.
|
|
|
|
"""
|
2010-09-25 14:42:36 -03:00
|
|
|
_acquireLock()
|
|
|
|
try:
|
|
|
|
if hdlr in self.handlers:
|
2005-09-16 07:33:40 -03:00
|
|
|
self.handlers.remove(hdlr)
|
2010-09-25 14:42:36 -03:00
|
|
|
finally:
|
|
|
|
_releaseLock()
|
2002-11-13 12:15:58 -04:00
|
|
|
|
|
|
|
def callHandlers(self, record):
|
|
|
|
"""
|
|
|
|
Pass a record to all relevant handlers.
|
|
|
|
|
|
|
|
Loop through all handlers for this logger and its parents in the
|
|
|
|
logger hierarchy. If no handler was found, output a one-off error
|
|
|
|
message to sys.stderr. Stop searching up the hierarchy whenever a
|
|
|
|
logger with the "propagate" attribute set to zero is found - that
|
|
|
|
will be the last logger whose handlers are called.
|
|
|
|
"""
|
|
|
|
c = self
|
|
|
|
found = 0
|
|
|
|
while c:
|
|
|
|
for hdlr in c.handlers:
|
|
|
|
found = found + 1
|
|
|
|
if record.levelno >= hdlr.level:
|
|
|
|
hdlr.handle(record)
|
|
|
|
if not c.propagate:
|
|
|
|
c = None #break out
|
|
|
|
else:
|
|
|
|
c = c.parent
|
2005-10-23 19:32:59 -03:00
|
|
|
if (found == 0) and raiseExceptions and not self.manager.emittedNoHandlerWarning:
|
2002-11-13 12:15:58 -04:00
|
|
|
sys.stderr.write("No handlers could be found for logger"
|
|
|
|
" \"%s\"\n" % self.name)
|
|
|
|
self.manager.emittedNoHandlerWarning = 1
|
|
|
|
|
|
|
|
    def getEffectiveLevel(self):
        """
        Get the effective level for this logger.

        Loop through this logger and its parents in the logger hierarchy,
        looking for a non-zero logging level. Return the first one found.
        """
        logger = self
        while logger:
            if logger.level:
                return logger.level
            logger = logger.parent
        return NOTSET

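    # A sketch of effective-level resolution (illustrative example names):
    #
    #     logging.getLogger('myapp').setLevel(logging.INFO)
    #     child = logging.getLogger('myapp.db')   # level left at NOTSET
    #     child.getEffectiveLevel()               # -> logging.INFO, inherited
    #                                             #    from the 'myapp' logger
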
    def isEnabledFor(self, level):
        """
        Is this logger enabled for level 'level'?
        """
        if self.manager.disable >= level:
            return 0
        return level >= self.getEffectiveLevel()

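    # A sketch of the usual guard for expensive log-message construction
    # (illustrative only; expensive_dump() is a hypothetical helper):
    #
    #     if logger.isEnabledFor(logging.DEBUG):
    #         logger.debug("State: %s", expensive_dump(state))
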
    def getChild(self, suffix):
        """
        Get a logger which is a descendant to this one.

        This is a convenience method, such that

        logging.getLogger('abc').getChild('def.ghi')

        is the same as

        logging.getLogger('abc.def.ghi')

        It's useful, for example, when the parent logger is named using
        __name__ rather than a literal string.
        """
        if self.root is not self:
            suffix = '.'.join((self.name, suffix))
        return self.manager.getLogger(suffix)

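# A minimal sketch of getChild in library code (illustrative; assumes the
# calling module's __name__ is used as the parent logger's name):
#
#     logger = logging.getLogger(__name__)
#     db_logger = logger.getChild('db')   # same as getLogger(__name__ + '.db')
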
class RootLogger(Logger):
    """
    A root logger is not that different to any other logger, except that
    it must have a logging level and there is only one instance of it in
    the hierarchy.
    """
    def __init__(self, level):
        """
        Initialize the logger with the name "root".
        """
        Logger.__init__(self, "root", level)

_loggerClass = Logger

class LoggerAdapter(object):
    """
    An adapter for loggers which makes it easier to specify contextual
    information in logging output.
    """

    def __init__(self, logger, extra):
        """
        Initialize the adapter with a logger and a dict-like object which
        provides contextual information. This constructor signature allows
        easy stacking of LoggerAdapters, if so desired.

        You can effectively pass keyword arguments as shown in the
        following example:

        adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2"))
        """
        self.logger = logger
        self.extra = extra

    def process(self, msg, kwargs):
        """
        Process the logging message and keyword arguments passed in to
        a logging call to insert contextual information. You can either
        manipulate the message itself, the keyword args or both. Return
        the message and kwargs modified (or not) to suit your needs.

        Normally, you'll only need to override this one method in a
        LoggerAdapter subclass for your specific needs.
        """
        kwargs["extra"] = self.extra
        return msg, kwargs

    def debug(self, msg, *args, **kwargs):
        """
        Delegate a debug call to the underlying logger, after adding
        contextual information from this adapter instance.
        """
        msg, kwargs = self.process(msg, kwargs)
        self.logger.debug(msg, *args, **kwargs)

    def info(self, msg, *args, **kwargs):
        """
        Delegate an info call to the underlying logger, after adding
        contextual information from this adapter instance.
        """
        msg, kwargs = self.process(msg, kwargs)
        self.logger.info(msg, *args, **kwargs)

    def warning(self, msg, *args, **kwargs):
        """
        Delegate a warning call to the underlying logger, after adding
        contextual information from this adapter instance.
        """
        msg, kwargs = self.process(msg, kwargs)
        self.logger.warning(msg, *args, **kwargs)

    def error(self, msg, *args, **kwargs):
        """
        Delegate an error call to the underlying logger, after adding
        contextual information from this adapter instance.
        """
        msg, kwargs = self.process(msg, kwargs)
        self.logger.error(msg, *args, **kwargs)

    def exception(self, msg, *args, **kwargs):
        """
        Delegate an exception call to the underlying logger, after adding
        contextual information from this adapter instance.
        """
        msg, kwargs = self.process(msg, kwargs)
        kwargs["exc_info"] = 1
        self.logger.error(msg, *args, **kwargs)

    def critical(self, msg, *args, **kwargs):
        """
        Delegate a critical call to the underlying logger, after adding
        contextual information from this adapter instance.
        """
        msg, kwargs = self.process(msg, kwargs)
        self.logger.critical(msg, *args, **kwargs)

    def log(self, level, msg, *args, **kwargs):
        """
        Delegate a log call to the underlying logger, after adding
        contextual information from this adapter instance.
        """
        msg, kwargs = self.process(msg, kwargs)
        self.logger.log(level, msg, *args, **kwargs)

    def isEnabledFor(self, level):
        """
        See if the underlying logger is enabled for the specified level.
        """
        return self.logger.isEnabledFor(level)

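# A minimal sketch of using LoggerAdapter, including a process() override
# (illustrative only; ConnectionAdapter and the 'connid' key are hypothetical
# names, not part of this module):
#
#     class ConnectionAdapter(LoggerAdapter):
#         def process(self, msg, kwargs):
#             kwargs["extra"] = self.extra
#             return '[%s] %s' % (self.extra['connid'], msg), kwargs
#
#     adapter = ConnectionAdapter(logging.getLogger('myapp'),
#                                 {'connid': 'abc123'})
#     adapter.info('connection opened')   # -> "[abc123] connection opened"
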
root = RootLogger(WARNING)
Logger.root = root
Logger.manager = Manager(Logger.root)

#---------------------------------------------------------------------------
# Configuration classes and functions
#---------------------------------------------------------------------------

BASIC_FORMAT = "%(levelname)s:%(name)s:%(message)s"

def basicConfig(**kwargs):
    """
    Do basic configuration for the logging system.

    This function does nothing if the root logger already has handlers
    configured. It is a convenience method intended for use by simple scripts
    to do one-shot configuration of the logging package.

    The default behaviour is to create a StreamHandler which writes to
    sys.stderr, set a formatter using the BASIC_FORMAT format string, and
    add the handler to the root logger.

    A number of optional keyword arguments may be specified, which can alter
    the default behaviour.

    filename  Specifies that a FileHandler be created, using the specified
              filename, rather than a StreamHandler.
    filemode  Specifies the mode to open the file, if filename is specified
              (if filemode is unspecified, it defaults to 'a').
    format    Use the specified format string for the handler.
    datefmt   Use the specified date/time format.
    level     Set the root logger level to the specified level.
    stream    Use the specified stream to initialize the StreamHandler. Note
              that this argument is incompatible with 'filename' - if both
              are present, 'stream' is ignored.

    Note that you could specify a stream created using open(filename, mode)
    rather than passing the filename and mode in. However, it should be
    remembered that StreamHandler does not close its stream (since it may be
    using sys.stdout or sys.stderr), whereas FileHandler closes its stream
    when the handler is closed.
    """
    # Add thread safety in case someone mistakenly calls
    # basicConfig() from multiple threads
    _acquireLock()
    try:
        if len(root.handlers) == 0:
            filename = kwargs.get("filename")
            if filename:
                mode = kwargs.get("filemode", 'a')
                hdlr = FileHandler(filename, mode)
            else:
                stream = kwargs.get("stream")
                hdlr = StreamHandler(stream)
            fs = kwargs.get("format", BASIC_FORMAT)
            dfs = kwargs.get("datefmt", None)
            fmt = Formatter(fs, dfs)
            hdlr.setFormatter(fmt)
            root.addHandler(hdlr)
            level = kwargs.get("level")
            if level is not None:
                root.setLevel(level)
    finally:
        _releaseLock()

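# A minimal sketch of one-shot configuration via basicConfig (illustrative
# only; the filename and format shown are arbitrary examples):
#
#     logging.basicConfig(filename='app.log', filemode='w',
#                         level=logging.DEBUG,
#                         format='%(asctime)s %(levelname)s %(message)s')
#     logging.debug('this goes to app.log')
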
#---------------------------------------------------------------------------
# Utility functions at module level.
# Basically delegate everything to the root logger.
#---------------------------------------------------------------------------

def getLogger(name=None):
    """
    Return a logger with the specified name, creating it if necessary.

    If no name is specified, return the root logger.
    """
    if name:
        return Logger.manager.getLogger(name)
    else:
        return root

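# A minimal sketch of the usual per-module logger idiom (illustrative):
#
#     logger = logging.getLogger(__name__)   # named logger in the hierarchy
#     logging.getLogger()                    # no name -> the root logger
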
#def getRootLogger():
#    """
#    Return the root logger.
#
#    Note that getLogger('') now does the same thing, so this function is
#    deprecated and may disappear in the future.
#    """
#    return root

def critical(msg, *args, **kwargs):
    """
    Log a message with severity 'CRITICAL' on the root logger.
    """
    if len(root.handlers) == 0:
        basicConfig()
    root.critical(msg, *args, **kwargs)

fatal = critical

def error(msg, *args, **kwargs):
    """
    Log a message with severity 'ERROR' on the root logger.
    """
    if len(root.handlers) == 0:
        basicConfig()
    root.error(msg, *args, **kwargs)

def exception(msg, *args):
    """
    Log a message with severity 'ERROR' on the root logger,
    with exception information.
    """
    error(msg, exc_info=1, *args)

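# A minimal sketch of exception() used from an except block (illustrative;
# risky_call() is a hypothetical function):
#
#     try:
#         risky_call()
#     except Exception:
#         logging.exception('risky_call failed')   # ERROR record with traceback
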
def warning(msg, *args, **kwargs):
    """
    Log a message with severity 'WARNING' on the root logger.
    """
    if len(root.handlers) == 0:
        basicConfig()
    root.warning(msg, *args, **kwargs)

warn = warning

def info(msg, *args, **kwargs):
    """
    Log a message with severity 'INFO' on the root logger.
    """
    if len(root.handlers) == 0:
        basicConfig()
    root.info(msg, *args, **kwargs)

def debug(msg, *args, **kwargs):
    """
    Log a message with severity 'DEBUG' on the root logger.
    """
    if len(root.handlers) == 0:
        basicConfig()
    root.debug(msg, *args, **kwargs)

def log(level, msg, *args, **kwargs):
    """
    Log 'msg % args' with the integer severity 'level' on the root logger.
    """
    if len(root.handlers) == 0:
        basicConfig()
    root.log(level, msg, *args, **kwargs)

def disable(level):
    """
    Disable all logging calls of severity 'level' and below.
    """
    root.manager.disable = level

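# A minimal sketch of disable() (illustrative):
#
#     logging.disable(logging.INFO)    # DEBUG and INFO calls are now no-ops
#     logging.disable(logging.NOTSET)  # restore normal level handling
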
def shutdown(handlerList=_handlerList):
    """
    Perform any cleanup actions in the logging system (e.g. flushing
    buffers).

    Should be called at application exit.
    """
    for wr in reversed(handlerList[:]):
        #errors might occur, for example, if files are locked
        #we just ignore them if raiseExceptions is not set
        try:
            h = wr()
            if h:
                try:
                    h.acquire()
                    h.flush()
                    h.close()
                except (IOError, ValueError):
                    # Ignore errors which might be caused
                    # because handlers have been closed but
                    # references to them are still around at
                    # application exit.
                    pass
                finally:
                    h.release()
        except:
            if raiseExceptions:
                raise
            #else, swallow

#Let's try and shutdown automatically on application exit...
import atexit
atexit.register(shutdown)

# Null handler

class NullHandler(Handler):
    """
    This handler does nothing. It's intended to be used to avoid the
    "No handlers could be found for logger XXX" one-off warning. This is
    important for library code, which may contain code to log events. If a user
    of the library does not configure logging, the one-off warning might be
    produced; to avoid this, the library developer simply needs to instantiate
    a NullHandler and add it to the top-level logger of the library module or
    package.
    """
    def handle(self, record):
        pass

    def emit(self, record):
        pass

    def createLock(self):
        self.lock = None

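# A minimal sketch of the library-author idiom described above (illustrative;
# 'mylib' is an arbitrary example package name):
#
#     logging.getLogger('mylib').addHandler(logging.NullHandler())
#     # users of 'mylib' who never configure logging will not see the
#     # "No handlers could be found" message for this package's loggers.
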
# Warnings integration

_warnings_showwarning = None

def _showwarning(message, category, filename, lineno, file=None, line=None):
    """
    Implementation of showwarning which redirects to logging. It will first
    check to see if the file parameter is None. If a file is specified, it will
    delegate to the original warnings implementation of showwarning. Otherwise,
    it will call warnings.formatwarning and will log the resulting string to a
    warnings logger named "py.warnings" with level logging.WARNING.
    """
    if file is not None:
        if _warnings_showwarning is not None:
            _warnings_showwarning(message, category, filename, lineno, file, line)
    else:
        s = warnings.formatwarning(message, category, filename, lineno, line)
        logger = getLogger("py.warnings")
        if not logger.handlers:
            logger.addHandler(NullHandler())
        logger.warning("%s", s)

def captureWarnings(capture):
    """
    If capture is True, redirect all warnings to the logging package.
    If capture is False, ensure that warnings are not redirected to logging
    but to their original destinations.
    """
    global _warnings_showwarning
    if capture:
        if _warnings_showwarning is None:
            _warnings_showwarning = warnings.showwarning
            warnings.showwarning = _showwarning
    else:
        if _warnings_showwarning is not None:
            warnings.showwarning = _warnings_showwarning
            _warnings_showwarning = None
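
# A minimal sketch of routing warnings through logging (illustrative):
#
#     logging.captureWarnings(True)
#     warnings.warn('deprecated call')   # logged by the "py.warnings" logger
#     logging.captureWarnings(False)     # restore the original showwarning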