"""An extensible library for opening URLs using a variety of protocols
|
2000-01-20 14:19:08 -04:00
|
|
|
|
|
|
|
The simplest way to use this module is to call the urlopen function,
|
2001-01-14 23:34:38 -04:00
|
|
|
which accepts a string containing a URL or a Request object (described
|
2000-01-20 14:19:08 -04:00
|
|
|
below). It opens the URL and returns the results as file-like
|
|
|
|
object; the returned object has some extra methods described below.
|
|
|
|
|
2002-10-11 14:27:55 -03:00
|
|
|
The OpenerDirector manages a collection of Handler objects that do
|
2001-01-14 23:34:38 -04:00
|
|
|
all the actual work. Each Handler implements a particular protocol or
|
2000-01-20 14:19:08 -04:00
|
|
|
option. The OpenerDirector is a composite object that invokes the
|
|
|
|
Handlers needed to open the requested URL. For example, the
|
|
|
|
HTTPHandler performs HTTP GET and POST requests and deals with
|
|
|
|
non-error returns. The HTTPRedirectHandler automatically deals with
|
2003-04-24 12:32:12 -03:00
|
|
|
HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
|
|
|
|
deals with digest authentication.
|
2000-01-20 14:19:08 -04:00
|
|
|
|
2007-03-05 12:31:54 -04:00
|
|
|
urlopen(url, data=None) -- Basic usage is the same as original
|
2000-01-20 14:19:08 -04:00
|
|
|
urllib. pass the url and optionally data to post to an HTTP URL, and
|
2001-01-14 23:34:38 -04:00
|
|
|
get a file-like object back. One difference is that you can also pass
|
2000-01-20 14:19:08 -04:00
|
|
|
a Request instance instead of URL. Raises a URLError (subclass of
|
|
|
|
IOError); for HTTP errors, raises an HTTPError, which can also be
|
|
|
|
treated as a valid response.
|
|
|
|
|
2007-03-05 12:31:54 -04:00
|
|
|
build_opener -- Function that creates a new OpenerDirector instance.
|
|
|
|
Will install the default handlers. Accepts one or more Handlers as
|
2000-01-20 14:19:08 -04:00
|
|
|
arguments, either instances or Handler classes that it will
|
2007-03-05 12:31:54 -04:00
|
|
|
instantiate. If one of the argument is a subclass of the default
|
2000-01-20 14:19:08 -04:00
|
|
|
handler, the argument will be installed instead of the default.
|
|
|
|
|
2007-03-05 12:31:54 -04:00
|
|
|
install_opener -- Installs a new opener as the default opener.
|
2000-01-20 14:19:08 -04:00
|
|
|
|
|
|
|
objects of interest:
|
2009-11-15 02:10:30 -04:00
|
|
|
|
|
|
|
OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages
|
|
|
|
the Handler classes, while dealing with requests and responses.
|
2000-01-20 14:19:08 -04:00
|
|
|
|
2007-03-05 12:31:54 -04:00
|
|
|
Request -- An object that encapsulates the state of a request. The
|
|
|
|
state can be as simple as the URL. It can also include extra HTTP
|
2000-01-20 14:19:08 -04:00
|
|
|
headers, e.g. a User-Agent.
|
|
|
|
|
|
|
|
BaseHandler --
|
|
|
|
|
|
|
|
exceptions:
|
2007-03-05 12:31:54 -04:00
|
|
|
URLError -- A subclass of IOError, individual protocols have their own
|
|
|
|
specific subclass.
|
2000-01-20 14:19:08 -04:00
|
|
|
|
2007-03-05 12:31:54 -04:00
|
|
|
HTTPError -- Also a valid HTTP response, so you can treat an HTTP error
|
|
|
|
as an exceptional event or valid response.
|
2000-01-20 14:19:08 -04:00
|
|
|
|
|
|
|
internals:
|
|
|
|
BaseHandler and parent
|
|
|
|
_call_chain conventions
|
|
|
|
|
|
|
|
Example usage:
|
|
|
|
|
|
|
|
import urllib2
|
|
|
|
|
|
|
|
# set up authentication info
|
|
|
|
authinfo = urllib2.HTTPBasicAuthHandler()
|
2007-04-24 01:53:12 -03:00
|
|
|
authinfo.add_password(realm='PDQ Application',
|
|
|
|
uri='https://mahler:8092/site-updates.py',
|
|
|
|
user='klem',
|
|
|
|
passwd='geheim$parole')
|
2000-01-20 14:19:08 -04:00
|
|
|
|
2001-03-01 04:40:42 -04:00
|
|
|
proxy_support = urllib2.ProxyHandler({"http" : "http://ahad-haam:3128"})
|
|
|
|
|
2001-01-14 23:34:38 -04:00
|
|
|
# build a new opener that adds authentication and caching FTP handlers
|
2001-03-01 04:40:42 -04:00
|
|
|
opener = urllib2.build_opener(proxy_support, authinfo, urllib2.CacheFTPHandler)
|
2000-01-20 14:19:08 -04:00
|
|
|
|
|
|
|
# install it
|
|
|
|
urllib2.install_opener(opener)
|
|
|
|
|
|
|
|
f = urllib2.urlopen('http://www.python.org/')
|
|
|
|
|
|
|
|
|
|
|
|
"""

# XXX issues:
# If an authentication error handler that tries to perform
# authentication for some reason but fails, how should the error be
# signalled?  The client needs to know the HTTP error code.  But if
# the handler knows that the problem was, e.g., that it didn't know
# the hash algorithm requested in the challenge, it would be good to
# pass that information along to the client, too.
# ftp errors aren't handled cleanly
# check digest against correct (i.e. non-apache) implementation

# Possible extensions:
# complex proxies  XXX not sure what exactly was meant by this
# abstract factory for opener

import base64
import hashlib
import httplib
import mimetools
import os
import posixpath
import random
import re
import socket
import sys
import time
import urlparse
import bisect
import warnings

try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

# check for SSL
try:
    import ssl
except ImportError:
    _have_ssl = False
else:
    _have_ssl = True

from urllib import (unwrap, unquote, splittype, splithost, quote,
     addinfourl, splitport, splittag, toBytes,
     splitattr, ftpwrapper, splituser, splitpasswd, splitvalue)

# support for FileHandler, proxies via environment variables
from urllib import localhost, url2pathname, getproxies, proxy_bypass

# used in User-Agent header sent
__version__ = sys.version[:3]

_opener = None

def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
            cafile=None, capath=None, cadefault=False, context=None):
    global _opener
    if cafile or capath or cadefault:
        if context is not None:
            raise ValueError(
                "You can't pass both context and any of cafile, capath, and "
                "cadefault"
            )
        if not _have_ssl:
            raise ValueError('SSL support not available')
        context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH,
                                             cafile=cafile,
                                             capath=capath)
        https_handler = HTTPSHandler(context=context)
        opener = build_opener(https_handler)
    elif context:
        https_handler = HTTPSHandler(context=context)
        opener = build_opener(https_handler)
    elif _opener is None:
        _opener = opener = build_opener()
    else:
        opener = _opener
    return opener.open(url, data, timeout)

def install_opener(opener):
    global _opener
    _opener = opener
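
# A minimal usage sketch, kept as a comment so nothing runs at import
# time; the proxy mapping and URL below are illustrative assumptions:
#
#     opener = build_opener(ProxyHandler({'http': 'http://proxy:3128'}))
#     install_opener(opener)        # later urlopen() calls use this opener
#     f = urlopen('http://www.python.org/')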

# do these error classes make sense?
# make sure all of the IOError stuff is overridden.  we just want to be
# subtypes.

class URLError(IOError):
    # URLError is a sub-type of IOError, but it doesn't share any of
    # the implementation.  need to override __init__ and __str__.
    # It sets self.args for compatibility with other EnvironmentError
    # subclasses, but args doesn't have the typical format with errno in
    # slot 0 and strerror in slot 1.  This may be better than nothing.
    def __init__(self, reason):
        self.args = reason,
        self.reason = reason

    def __str__(self):
        return '<urlopen error %s>' % self.reason

class HTTPError(URLError, addinfourl):
    """Raised when HTTP error occurs, but also acts like non-error return"""
    __super_init = addinfourl.__init__

    def __init__(self, url, code, msg, hdrs, fp):
        self.code = code
        self.msg = msg
        self.hdrs = hdrs
        self.fp = fp
        self.filename = url
        # The addinfourl classes depend on fp being a valid file
        # object.  In some cases, the HTTPError may not have a valid
        # file object.  If this happens, the simplest workaround is to
        # not initialize the base classes.
        if fp is not None:
            self.__super_init(fp, hdrs, url, code)

    def __str__(self):
        return 'HTTP Error %s: %s' % (self.code, self.msg)

    # since URLError specifies a .reason attribute, HTTPError should also
    # provide this attribute. See issue13211 for discussion.
    @property
    def reason(self):
        return self.msg

    def info(self):
        return self.hdrs

# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$")
def request_host(request):
    """Return request-host, as defined by RFC 2965.

    Variation from RFC: returned value is lowercased, for convenient
    comparison.

    """
    url = request.get_full_url()
    host = urlparse.urlparse(url)[1]
    if host == "":
        host = request.get_header("Host", "")

    # remove port, if present
    host = _cut_port_re.sub("", host, 1)
    return host.lower()
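
# A sketch of the reduction performed above (the URL is an illustrative
# assumption):
#
#     req = Request('http://www.Example.com:8080/path')
#     request_host(req)   # -> 'www.example.com' (port stripped, lowercased)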

class Request:

    def __init__(self, url, data=None, headers={},
                 origin_req_host=None, unverifiable=False):
        # unwrap('<URL:type://host/path>') --> 'type://host/path'
        self.__original = unwrap(url)
        self.__original, self.__fragment = splittag(self.__original)
        self.type = None
        # self.__r_type is what's left after doing the splittype
        self.host = None
        self.port = None
        self._tunnel_host = None
        self.data = data
        self.headers = {}
        for key, value in headers.items():
            self.add_header(key, value)
        self.unredirected_hdrs = {}
        if origin_req_host is None:
            origin_req_host = request_host(self)
        self.origin_req_host = origin_req_host
        self.unverifiable = unverifiable

    def __getattr__(self, attr):
        # XXX this is a fallback mechanism to guard against these
        # methods getting called in a non-standard order.  this may be
        # too complicated and/or unnecessary.
        # XXX should the __r_XXX attributes be public?
        if attr in ('_Request__r_type', '_Request__r_host'):
            getattr(self, 'get_' + attr[12:])()
            return self.__dict__[attr]
        raise AttributeError, attr

    def get_method(self):
        if self.has_data():
            return "POST"
        else:
            return "GET"

    # XXX these helper methods are lame

    def add_data(self, data):
        self.data = data

    def has_data(self):
        return self.data is not None

    def get_data(self):
        return self.data

    def get_full_url(self):
        if self.__fragment:
            return '%s#%s' % (self.__original, self.__fragment)
        else:
            return self.__original

    def get_type(self):
        if self.type is None:
            self.type, self.__r_type = splittype(self.__original)
            if self.type is None:
                raise ValueError, "unknown url type: %s" % self.__original
        return self.type

    def get_host(self):
        if self.host is None:
            self.host, self.__r_host = splithost(self.__r_type)
            if self.host:
                self.host = unquote(self.host)
        return self.host

    def get_selector(self):
        return self.__r_host

    def set_proxy(self, host, type):
        if self.type == 'https' and not self._tunnel_host:
            self._tunnel_host = self.host
        else:
            self.type = type
            self.__r_host = self.__original

        self.host = host

    def has_proxy(self):
        return self.__r_host == self.__original

    def get_origin_req_host(self):
        return self.origin_req_host

    def is_unverifiable(self):
        return self.unverifiable

    def add_header(self, key, val):
        # useful for something like authentication
        self.headers[key.capitalize()] = val

    def add_unredirected_header(self, key, val):
        # will not be added to a redirected request
        self.unredirected_hdrs[key.capitalize()] = val

    def has_header(self, header_name):
        return (header_name in self.headers or
                header_name in self.unredirected_hdrs)

    def get_header(self, header_name, default=None):
        return self.headers.get(
            header_name,
            self.unredirected_hdrs.get(header_name, default))

    def header_items(self):
        hdrs = self.unredirected_hdrs.copy()
        hdrs.update(self.headers)
        return hdrs.items()
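
# A short sketch of typical Request use (the URL, data and header values
# are illustrative assumptions):
#
#     req = Request('http://www.example.com/', data='q=42')
#     req.add_header('User-agent', 'my-client/1.0')
#     req.get_method()      # -> 'POST', because data is present
#     req.header_items()    # -> [('User-agent', 'my-client/1.0')]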

class OpenerDirector:
    def __init__(self):
        client_version = "Python-urllib/%s" % __version__
        self.addheaders = [('User-agent', client_version)]
        # self.handlers is retained only for backward compatibility
        self.handlers = []
        # manage the individual handlers
        self.handle_open = {}
        self.handle_error = {}
        self.process_response = {}
        self.process_request = {}

    def add_handler(self, handler):
        if not hasattr(handler, "add_parent"):
            raise TypeError("expected BaseHandler instance, got %r" %
                            type(handler))

        added = False
        for meth in dir(handler):
            if meth in ["redirect_request", "do_open", "proxy_open"]:
                # oops, coincidental match
                continue

            i = meth.find("_")
            protocol = meth[:i]
            condition = meth[i+1:]

            if condition.startswith("error"):
                j = condition.find("_") + i + 1
                kind = meth[j+1:]
                try:
                    kind = int(kind)
                except ValueError:
                    pass
                lookup = self.handle_error.get(protocol, {})
                self.handle_error[protocol] = lookup
            elif condition == "open":
                kind = protocol
                lookup = self.handle_open
            elif condition == "response":
                kind = protocol
                lookup = self.process_response
            elif condition == "request":
                kind = protocol
                lookup = self.process_request
            else:
                continue

            handlers = lookup.setdefault(kind, [])
            if handlers:
                bisect.insort(handlers, handler)
            else:
                handlers.append(handler)
            added = True

        if added:
            bisect.insort(self.handlers, handler)
            handler.add_parent(self)

    def close(self):
        # Only exists for backwards compatibility.
        pass

    def _call_chain(self, chain, kind, meth_name, *args):
        # Handlers raise an exception if no one else should try to handle
        # the request, or return None if they can't but another handler
        # could.  Otherwise, they return the response.
        handlers = chain.get(kind, ())
        for handler in handlers:
            func = getattr(handler, meth_name)

            result = func(*args)
            if result is not None:
                return result
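
    # How handler method names map into the dispatch tables built by
    # add_handler() above (a sketch; the method names are the usual
    # protocol_condition forms defined by the handlers below):
    #
    #     http_open       -> handle_open['http']
    #     http_error_302  -> handle_error['http'][302]
    #     http_request    -> process_request['http']
    #     http_response   -> process_response['http']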

    def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
        # accept a URL or a Request object
        if isinstance(fullurl, basestring):
            req = Request(fullurl, data)
        else:
            req = fullurl
            if data is not None:
                req.add_data(data)

        req.timeout = timeout
        protocol = req.get_type()

        # pre-process request
        meth_name = protocol+"_request"
        for processor in self.process_request.get(protocol, []):
            meth = getattr(processor, meth_name)
            req = meth(req)

        response = self._open(req, data)

        # post-process response
        meth_name = protocol+"_response"
        for processor in self.process_response.get(protocol, []):
            meth = getattr(processor, meth_name)
            response = meth(req, response)

        return response

    def _open(self, req, data=None):
        result = self._call_chain(self.handle_open, 'default',
                                  'default_open', req)
        if result:
            return result

        protocol = req.get_type()
        result = self._call_chain(self.handle_open, protocol, protocol +
                                  '_open', req)
        if result:
            return result

        return self._call_chain(self.handle_open, 'unknown',
                                'unknown_open', req)

    def error(self, proto, *args):
        if proto in ('http', 'https'):
            # XXX http[s] protocols are special-cased
            dict = self.handle_error['http'] # https is not different than http
            proto = args[2]  # YUCK!
            meth_name = 'http_error_%s' % proto
            http_err = 1
            orig_args = args
        else:
            dict = self.handle_error
            meth_name = proto + '_error'
            http_err = 0
        args = (dict, proto, meth_name) + args
        result = self._call_chain(*args)
        if result:
            return result

        if http_err:
            args = (dict, 'default', 'http_error_default') + orig_args
            return self._call_chain(*args)

# XXX probably also want an abstract factory that knows when it makes
# sense to skip a superclass in favor of a subclass and when it might
# make sense to include both

def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP, FTP and when applicable, HTTPS.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    import types
    def isclass(obj):
        return isinstance(obj, (types.ClassType, type))

    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(httplib, 'HTTPS'):
        default_classes.append(HTTPSHandler)
    skip = set()
    for klass in default_classes:
        for check in handlers:
            if isclass(check):
                if issubclass(check, klass):
                    skip.add(klass)
            elif isinstance(check, klass):
                skip.add(klass)
    for klass in skip:
        default_classes.remove(klass)

    for klass in default_classes:
        opener.add_handler(klass())

    for h in handlers:
        if isclass(h):
            h = h()
        opener.add_handler(h)
    return opener
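
# A sketch of the subclass-replacement rule described in the docstring
# (the handler class below is an illustrative assumption):
#
#     class VerboseHTTPHandler(HTTPHandler):
#         def __init__(self):
#             HTTPHandler.__init__(self, debuglevel=1)
#
#     opener = build_opener(VerboseHTTPHandler)
#     # VerboseHTTPHandler is installed instead of the default HTTPHandler.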

class BaseHandler:
    handler_order = 500

    def add_parent(self, parent):
        self.parent = parent

    def close(self):
        # Only exists for backwards compatibility
        pass

    def __lt__(self, other):
        if not hasattr(other, "handler_order"):
            # Try to preserve the old behavior of having custom classes
            # inserted after default ones (works only for custom user
            # classes which are not aware of handler_order).
            return True
        return self.handler_order < other.handler_order


class HTTPErrorProcessor(BaseHandler):
    """Process HTTP error responses."""
    handler_order = 1000  # after all other processing

    def http_response(self, request, response):
        code, msg, hdrs = response.code, response.msg, response.info()

        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        if not (200 <= code < 300):
            response = self.parent.error(
                'http', request, response, code, msg, hdrs)

        return response

    https_response = http_response

class HTTPDefaultErrorHandler(BaseHandler):
    def http_error_default(self, req, fp, code, msg, hdrs):
        raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)

class HTTPRedirectHandler(BaseHandler):
    # maximum number of redirections to any single URL
    # this is needed because of the state that cookies introduce
    max_repeats = 4
    # maximum total number of redirections (regardless of URL) before
    # assuming we're in a loop
    max_redirections = 10

    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.

        This is called by the http_error_30x methods when a
        redirection response is received.  If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect.  Otherwise, raise HTTPError if no-one
        else should try to handle this url.  Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
            or code in (301, 302, 303) and m == "POST"):
            # Strictly (according to RFC 2616), 301 or 302 in response
            # to a POST MUST NOT cause a redirection without confirmation
            # from the user (of urllib2, in this case).  In practice,
            # essentially all clients do redirect in this case, so we
            # do the same.
            # be conciliant with URIs containing a space
            newurl = newurl.replace(' ', '%20')
            newheaders = dict((k,v) for k,v in req.headers.items()
                              if k.lower() not in ("content-length", "content-type")
                             )
            return Request(newurl,
                           headers=newheaders,
                           origin_req_host=req.get_origin_req_host(),
                           unverifiable=True)
        else:
            raise HTTPError(req.get_full_url(), code, msg, headers, fp)

    # Implementation note: To avoid the server sending us into an
    # infinite loop, the request object needs to track what URLs we
    # have already seen.  Do this by adding a handler-specific
    # attribute to the Request object.
    def http_error_302(self, req, fp, code, msg, headers):
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI).  Use first header.
        if 'location' in headers:
            newurl = headers.getheaders('location')[0]
        elif 'uri' in headers:
            newurl = headers.getheaders('uri')[0]
        else:
            return

        # fix a possible malformed URL
        urlparts = urlparse.urlparse(newurl)
        if not urlparts.path and urlparts.netloc:
            urlparts = list(urlparts)
            urlparts[2] = "/"
        newurl = urlparse.urlunparse(urlparts)

        newurl = urlparse.urljoin(req.get_full_url(), newurl)

        # For security reasons we do not allow redirects to protocols
        # other than HTTP, HTTPS or FTP.
        newurl_lower = newurl.lower()
        if not (newurl_lower.startswith('http://') or
                newurl_lower.startswith('https://') or
                newurl_lower.startswith('ftp://')):
            raise HTTPError(newurl, code,
                            msg + " - Redirection to url '%s' is not allowed" %
                            newurl,
                            headers, fp)

        # XXX Probably want to forget about the state of the current
        # request, although that might interact poorly with other
        # handlers that also use handler-specific request attributes
        new = self.redirect_request(req, fp, code, msg, headers, newurl)
        if new is None:
            return

        # loop detection
        # .redirect_dict has a key url if url was previously visited.
        if hasattr(req, 'redirect_dict'):
            visited = new.redirect_dict = req.redirect_dict
            if (visited.get(newurl, 0) >= self.max_repeats or
                len(visited) >= self.max_redirections):
                raise HTTPError(req.get_full_url(), code,
                                self.inf_msg + msg, headers, fp)
        else:
            visited = new.redirect_dict = req.redirect_dict = {}
        visited[newurl] = visited.get(newurl, 0) + 1

        # Don't close the fp until we are sure that we won't use it
        # with HTTPError.
        fp.read()
        fp.close()

        return self.parent.open(new, timeout=req.timeout)

    http_error_301 = http_error_303 = http_error_307 = http_error_302

    inf_msg = "The HTTP server returned a redirect error that would " \
              "lead to an infinite loop.\n" \
              "The last 30x error message was:\n"

def _parse_proxy(proxy):
    """Return (scheme, user, password, host/port) given a URL or an authority.

    If a URL is supplied, it must have an authority (host:port) component.
    According to RFC 3986, having an authority component means the URL must
    have two slashes after the scheme:

    >>> _parse_proxy('file:/ftp.example.com/')
    Traceback (most recent call last):
    ValueError: proxy URL with no authority: 'file:/ftp.example.com/'

    The first three items of the returned tuple may be None.

    Examples of authority parsing:

    >>> _parse_proxy('proxy.example.com')
    (None, None, None, 'proxy.example.com')
    >>> _parse_proxy('proxy.example.com:3128')
    (None, None, None, 'proxy.example.com:3128')

    The authority component may optionally include userinfo (assumed to be
    username:password):

    >>> _parse_proxy('joe:password@proxy.example.com')
    (None, 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('joe:password@proxy.example.com:3128')
    (None, 'joe', 'password', 'proxy.example.com:3128')

    Same examples, but with URLs instead:

    >>> _parse_proxy('http://proxy.example.com/')
    ('http', None, None, 'proxy.example.com')
    >>> _parse_proxy('http://proxy.example.com:3128/')
    ('http', None, None, 'proxy.example.com:3128')
    >>> _parse_proxy('http://joe:password@proxy.example.com/')
    ('http', 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('http://joe:password@proxy.example.com:3128')
    ('http', 'joe', 'password', 'proxy.example.com:3128')

    Everything after the authority is ignored:

    >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
    ('ftp', 'joe', 'password', 'proxy.example.com')

    Test for no trailing '/' case:

    >>> _parse_proxy('http://joe:password@proxy.example.com')
    ('http', 'joe', 'password', 'proxy.example.com')

    """
    scheme, r_scheme = splittype(proxy)
    if not r_scheme.startswith("/"):
        # authority
        scheme = None
        authority = proxy
    else:
        # URL
        if not r_scheme.startswith("//"):
            raise ValueError("proxy URL with no authority: %r" % proxy)
        # We have an authority, so for RFC 3986-compliant URLs (by ss 3.2
        # and 3.3), path is empty or starts with '/'
        end = r_scheme.find("/", 2)
        if end == -1:
            end = None
        authority = r_scheme[2:end]
    userinfo, hostport = splituser(authority)
    if userinfo is not None:
        user, password = splitpasswd(userinfo)
    else:
        user = password = None
    return scheme, user, password, hostport

class ProxyHandler(BaseHandler):
    # Proxies must be in front
    handler_order = 100

    def __init__(self, proxies=None):
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
        self.proxies = proxies
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open: \
                    meth(r, proxy, type))

    def proxy_open(self, req, proxy, type):
        orig_type = req.get_type()
        proxy_type, user, password, hostport = _parse_proxy(proxy)

        if proxy_type is None:
            proxy_type = orig_type

        if req.host and proxy_bypass(req.host):
            return None

        if user and password:
            user_pass = '%s:%s' % (unquote(user), unquote(password))
            creds = base64.b64encode(user_pass).strip()
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        hostport = unquote(hostport)
        req.set_proxy(hostport, proxy_type)

        if orig_type == proxy_type or orig_type == 'https':
            # let other handlers take care of it
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            # e.g. if we have a constructor arg proxies like so:
            # {'http': 'ftp://proxy.example.com'}, we may end up turning
            # a request for http://acme.example.com/a into one for
            # ftp://proxy.example.com/a
            return self.parent.open(req, timeout=req.timeout)

class HTTPPasswordMgr:

    def __init__(self):
        self.passwd = {}

    def add_password(self, realm, uri, user, passwd):
        # uri could be a single URI or a sequence
        if isinstance(uri, basestring):
            uri = [uri]
        if not realm in self.passwd:
            self.passwd[realm] = {}
        for default_port in True, False:
            reduced_uri = tuple(
                [self.reduce_uri(u, default_port) for u in uri])
            self.passwd[realm][reduced_uri] = (user, passwd)

    def find_user_password(self, realm, authuri):
        domains = self.passwd.get(realm, {})
        for default_port in True, False:
            reduced_authuri = self.reduce_uri(authuri, default_port)
            for uris, authinfo in domains.iteritems():
                for uri in uris:
                    if self.is_suburi(uri, reduced_authuri):
                        return authinfo
        return None, None

    def reduce_uri(self, uri, default_port=True):
        """Accept authority or URI and extract only the authority and path."""
        # note HTTP URLs do not have a userinfo component
        parts = urlparse.urlsplit(uri)
        if parts[1]:
            # URI
            scheme = parts[0]
            authority = parts[1]
            path = parts[2] or '/'
        else:
            # host or host:port
            scheme = None
            authority = uri
            path = '/'
        host, port = splitport(authority)
        if default_port and port is None and scheme is not None:
            dport = {"http": 80,
                     "https": 443,
                     }.get(scheme)
            if dport is not None:
                authority = "%s:%d" % (host, dport)
        return authority, path
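
    # What reduce_uri returns, sketched (the values are illustrative):
    #
    #     reduce_uri('http://example.com/path')  # -> ('example.com:80', '/path')
    #     reduce_uri('example.com')              # -> ('example.com', '/')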

    def is_suburi(self, base, test):
        """Check if test is below base in a URI tree

        Both args must be URIs in reduced form.
        """
        if base == test:
            return True
        if base[0] != test[0]:
            return False
        common = posixpath.commonprefix((base[1], test[1]))
        if len(common) == len(base[1]):
            return True
        return False


class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):

    def find_user_password(self, realm, authuri):
        user, password = HTTPPasswordMgr.find_user_password(self, realm,
                                                            authuri)
        if user is not None:
            return user, password
        return HTTPPasswordMgr.find_user_password(self, None, authuri)


class AbstractBasicAuthHandler:

    # XXX this allows for multiple auth-schemes, but will stupidly pick
    # the last one with a realm specified.

    # allow for double- and single-quoted realm values
    # (single quotes are a violation of the RFC, but appear in the wild)
    rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
                    'realm=(["\']?)([^"\']*)\\2', re.I)

    # XXX could pre-emptively send auth info already accepted (RFC 2617,
    # end of section 2, and section 1.2 immediately after "credentials"
    # production).

    def __init__(self, password_mgr=None):
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        self.add_password = self.passwd.add_password

    def http_error_auth_reqed(self, authreq, host, req, headers):
        # host may be an authority (without userinfo) or a URL with an
        # authority
        # XXX could be multiple headers
        authreq = headers.get(authreq, None)

        if authreq:
            mo = AbstractBasicAuthHandler.rx.search(authreq)
            if mo:
                scheme, quote, realm = mo.groups()
                if quote not in ['"', "'"]:
                    warnings.warn("Basic Auth Realm was unquoted",
                                  UserWarning, 2)
                if scheme.lower() == 'basic':
                    return self.retry_http_basic_auth(host, req, realm)

    def retry_http_basic_auth(self, host, req, realm):
        user, pw = self.passwd.find_user_password(realm, host)
        if pw is not None:
            raw = "%s:%s" % (user, pw)
            auth = 'Basic %s' % base64.b64encode(raw).strip()
            if req.get_header(self.auth_header, None) == auth:
                return None
            req.add_unredirected_header(self.auth_header, auth)
            return self.parent.open(req, timeout=req.timeout)
        else:
            return None

class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        url = req.get_full_url()
        response = self.http_error_auth_reqed('www-authenticate',
                                              url, req, headers)
        return response


class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):

    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # http_error_auth_reqed requires that there is no userinfo component in
        # authority.  Assume there isn't one, since urllib2 does not (and
        # should not, RFC 3986 s. 3.2.1) support requests for URLs containing
        # userinfo.
        authority = req.get_host()
        response = self.http_error_auth_reqed('proxy-authenticate',
                                              authority, req, headers)
        return response


def randombytes(n):
    """Return n random bytes."""
    # Use /dev/urandom if it is available.  Fall back to random module
    # if not.  It might be worthwhile to extend this function to use
    # other platform-specific mechanisms for getting random bytes.
    if os.path.exists("/dev/urandom"):
        f = open("/dev/urandom")
        s = f.read(n)
        f.close()
        return s
    else:
        L = [chr(random.randrange(0, 256)) for i in range(n)]
        return "".join(L)

class AbstractDigestAuthHandler:
    # Digest authentication is specified in RFC 2617.

    # XXX The client does not inspect the Authentication-Info header
    # in a successful response.

    # XXX It should be possible to test this implementation against
    # a mock server that just generates a static set of challenges.

    # XXX qop="auth-int" support is shaky

    def __init__(self, passwd=None):
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        self.add_password = self.passwd.add_password
        self.retried = 0
        self.nonce_count = 0
        self.last_nonce = None

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        authreq = headers.get(auth_header, None)
        if self.retried > 5:
            # Don't fail endlessly - if we failed once, we'll probably
            # fail a second time. Hm. Unless the Password Manager is
            # prompting for the information. Crap. This isn't great
            # but it's better than the current 'repeat until recursion
            # depth exceeded' approach <wink>
            raise HTTPError(req.get_full_url(), 401, "digest auth failed",
                            headers, None)
        else:
            self.retried += 1
        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() == 'digest':
                return self.retry_http_digest_auth(req, authreq)

    def retry_http_digest_auth(self, req, auth):
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(parse_http_list(challenge))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            if req.headers.get(self.auth_header, None) == auth_val:
                return None
            req.add_unredirected_header(self.auth_header, auth_val)
            resp = self.parent.open(req, timeout=req.timeout)
            return resp

    def get_cnonce(self, nonce):
        # The cnonce-value is an opaque
        # quoted string value provided by the client and used by both client
        # and server to avoid chosen plaintext attacks, to provide mutual
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        dig = hashlib.sha1("%s:%s:%s:%s" % (self.nonce_count, nonce, time.ctime(),
                                            randombytes(8))).hexdigest()
        return dig[:16]

    def get_authorization(self, req, chal):
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None

        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            return None

        user, pw = self.passwd.find_user_password(realm, req.get_full_url())
        if user is None:
            return None

        # XXX not implemented yet
        if req.has_data():
            entdig = self.get_entity_digest(req.get_data(), chal)
        else:
            entdig = None

        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.get_method(),
                        # XXX selector: what about proxies and full urls
                        req.get_selector())
        if qop == 'auth':
            if nonce == self.last_nonce:
                self.nonce_count += 1
            else:
                self.nonce_count = 1
                self.last_nonce = nonce

            ncvalue = '%08x' % self.nonce_count
            cnonce = self.get_cnonce(nonce)
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
            respdig = KD(H(A1), noncebit)
        elif qop is None:
            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        else:
            # XXX handle auth-int.
            raise URLError("qop '%s' is not supported." % qop)

        # XXX should the partial digests be encoded too?

        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.get_selector(),
                                  respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if entdig:
            base += ', digest="%s"' % entdig
        base += ', algorithm="%s"' % algorithm
        if qop:
            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return base
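
    # For reference, the RFC 2617 computation performed above for
    # qop == 'auth' (H is the hash function; KD(s, d) = H(s + ':' + d)):
    #
    #     A1 = user + ':' + realm + ':' + password
    #     A2 = method + ':' + uri
    #     response = KD(H(A1),
    #                   nonce + ':' + nc + ':' + cnonce + ':' + qop + ':' + H(A2))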

    def get_algorithm_impls(self, algorithm):
        # algorithm should be case-insensitive according to RFC2617
        algorithm = algorithm.upper()
        # lambdas assume digest modules are imported at the top level
        if algorithm == 'MD5':
            H = lambda x: hashlib.md5(x).hexdigest()
        elif algorithm == 'SHA':
            H = lambda x: hashlib.sha1(x).hexdigest()
        # XXX MD5-sess
        else:
            raise ValueError("Unsupported digest authentication "
                             "algorithm %r" % algorithm.lower())
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD

    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None


class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069

    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """

    auth_header = 'Authorization'
    handler_order = 490  # before Basic auth

    def http_error_401(self, req, fp, code, msg, headers):
        host = urlparse.urlparse(req.get_full_url())[1]
        retry = self.http_error_auth_reqed('www-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry


class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):

    auth_header = 'Proxy-Authorization'
    handler_order = 490  # before Basic auth

    def http_error_407(self, req, fp, code, msg, headers):
        host = req.get_host()
        retry = self.http_error_auth_reqed('proxy-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry

class AbstractHTTPHandler(BaseHandler):

    def __init__(self, debuglevel=0):
        self._debuglevel = debuglevel

    def set_http_debuglevel(self, level):
        self._debuglevel = level

    def do_request_(self, request):
        host = request.get_host()
        if not host:
            raise URLError('no host given')

        if request.has_data():  # POST
            data = request.get_data()
            if not request.has_header('Content-type'):
                request.add_unredirected_header(
                    'Content-type',
                    'application/x-www-form-urlencoded')
            if not request.has_header('Content-length'):
                request.add_unredirected_header(
                    'Content-length', '%d' % len(data))

        sel_host = host
        if request.has_proxy():
            scheme, sel = splittype(request.get_selector())
            sel_host, sel_path = splithost(sel)

        if not request.has_header('Host'):
            request.add_unredirected_header('Host', sel_host)
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if not request.has_header(name):
                request.add_unredirected_header(name, value)

        return request
|
|
|
|
|
2014-11-23 13:42:45 -04:00
|
|
|
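    # For example, a POST built as (hypothetical URL)
    #
    #     req = Request('http://www.example.com/form', data='key=value')
    #
    # passes through do_request_() and picks up default Content-type
    # ('application/x-www-form-urlencoded'), Content-length, and Host
    # headers unless the caller has already set them.
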
    def do_open(self, http_class, req, **http_conn_args):
        """Return an addinfourl object for the request, using http_class.

        http_class must implement the HTTPConnection API from httplib.
        The addinfourl return value is a file-like object.  It also
        has methods and attributes including:
            - info(): return a mimetools.Message object for the headers
            - geturl(): return the original request URL
            - code: HTTP status code
        """
        host = req.get_host()
        if not host:
            raise URLError('no host given')

        # will parse host:port
        h = http_class(host, timeout=req.timeout, **http_conn_args)
        h.set_debuglevel(self._debuglevel)

        headers = dict(req.unredirected_hdrs)
        headers.update(dict((k, v) for k, v in req.headers.items()
                            if k not in headers))

        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = dict(
            (name.title(), val) for name, val in headers.items())

        if req._tunnel_host:
            tunnel_headers = {}
            proxy_auth_hdr = "Proxy-Authorization"
            if proxy_auth_hdr in headers:
                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                # Proxy-Authorization should not be sent to origin
                # server.
                del headers[proxy_auth_hdr]
            h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

        try:
            h.request(req.get_method(), req.get_selector(), req.data, headers)
        except socket.error, err:  # XXX what error?
            h.close()
            raise URLError(err)
        else:
            try:
                r = h.getresponse(buffering=True)
            except TypeError:  # buffering kw not supported
                r = h.getresponse()

        # Pick apart the HTTPResponse object to get the addinfourl
        # object initialized properly.

        # Wrap the HTTPResponse object in socket's file object adapter
        # for Windows.  That adapter calls recv(), so delegate recv()
        # to read().  This weird wrapping allows the returned object to
        # have readline() and readlines() methods.

        # XXX It might be better to extract the read buffering code
        # out of socket._fileobject() and into a base class.

        r.recv = r.read
        fp = socket._fileobject(r, close=True)

        resp = addinfourl(fp, r.msg, req.get_full_url())
        resp.code = r.status
        resp.msg = r.reason
        return resp

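# The addinfourl returned by do_open() behaves like the object that
# urlopen() hands back (hypothetical URL):
#
#     resp = urlopen('http://www.example.com/')
#     resp.code       # HTTP status, e.g. 200
#     resp.msg        # reason phrase, e.g. 'OK'
#     resp.info()     # mimetools.Message with the response headers
#     resp.geturl()   # URL of the request that produced the response
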
class HTTPHandler(AbstractHTTPHandler):

    def http_open(self, req):
        return self.do_open(httplib.HTTPConnection, req)

    http_request = AbstractHTTPHandler.do_request_

if hasattr(httplib, 'HTTPS'):
    class HTTPSHandler(AbstractHTTPHandler):

        def __init__(self, debuglevel=0, context=None):
            AbstractHTTPHandler.__init__(self, debuglevel)
            self._context = context

        def https_open(self, req):
            return self.do_open(httplib.HTTPSConnection, req,
                                context=self._context)

        https_request = AbstractHTTPHandler.do_request_

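# Sketch of supplying a custom SSL context (assumes an SSL-enabled
# build with a recent ssl module; the CA file path is hypothetical):
#
#     import ssl
#     ctx = ssl.create_default_context(cafile='/path/to/ca.pem')
#     opener = build_opener(HTTPSHandler(context=ctx))
#     opener.open('https://www.example.com/')
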
class HTTPCookieProcessor(BaseHandler):
    def __init__(self, cookiejar=None):
        import cookielib
        if cookiejar is None:
            cookiejar = cookielib.CookieJar()
        self.cookiejar = cookiejar

    def http_request(self, request):
        self.cookiejar.add_cookie_header(request)
        return request

    def http_response(self, request, response):
        self.cookiejar.extract_cookies(response, request)
        return response

    https_request = http_request
    https_response = http_response

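# Minimal sketch: route requests and responses through one CookieJar
# (hypothetical URL):
#
#     import cookielib
#     jar = cookielib.CookieJar()
#     opener = build_opener(HTTPCookieProcessor(jar))
#     opener.open('http://www.example.com/')
#     # any Set-Cookie headers from the response are now stored in jar
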
class UnknownHandler(BaseHandler):
    def unknown_open(self, req):
        type = req.get_type()
        raise URLError('unknown url type: %s' % type)

def parse_keqv_list(l):
    """Parse list of key=value strings where keys are not duplicated."""
    parsed = {}
    for elt in l:
        k, v = elt.split('=', 1)
        if v[0] == '"' and v[-1] == '"':
            v = v[1:-1]
        parsed[k] = v
    return parsed

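# For example, the challenge fields of a Digest header parse as:
#
#     >>> parse_keqv_list(['realm="example"', 'nonce="abc123"'])
#     {'nonce': 'abc123', 'realm': 'example'}
#
# (dictionary key order may vary)
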
def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Neither commas nor quotes count if they are escaped.

    Only double-quotes count, not single-quotes.
    """
    res = []
    part = ''

    escape = quote = False
    for cur in s:
        if escape:
            part += cur
            escape = False
            continue
        if quote:
            if cur == '\\':
                escape = True
                continue
            elif cur == '"':
                quote = False
            part += cur
            continue

        if cur == ',':
            res.append(part)
            part = ''
            continue

        if cur == '"':
            quote = True

        part += cur

    # append last part
    if part:
        res.append(part)

    return [part.strip() for part in res]

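# For example:
#
#     >>> parse_http_list('a, "b, c", d')
#     ['a', '"b, c"', 'd']
#
# Note that quoting is preserved; parse_keqv_list() strips the quotes.
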
def _safe_gethostbyname(host):
    try:
        return socket.gethostbyname(host)
    except socket.gaierror:
        return None

class FileHandler(BaseHandler):
    # Use local file or FTP depending on form of URL
    def file_open(self, req):
        url = req.get_selector()
        if url[:2] == '//' and url[2:3] != '/' and (req.host and
                req.host != 'localhost'):
            req.type = 'ftp'
            return self.parent.open(req)
        else:
            return self.open_local_file(req)

    # names for the localhost
    names = None
    def get_names(self):
        if FileHandler.names is None:
            try:
                FileHandler.names = tuple(
                    socket.gethostbyname_ex('localhost')[2] +
                    socket.gethostbyname_ex(socket.gethostname())[2])
            except socket.gaierror:
                FileHandler.names = (socket.gethostbyname('localhost'),)
        return FileHandler.names

    # not entirely sure what the rules are here
    def open_local_file(self, req):
        import email.utils
        import mimetypes
        host = req.get_host()
        filename = req.get_selector()
        localfile = url2pathname(filename)
        try:
            stats = os.stat(localfile)
            size = stats.st_size
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            mtype = mimetypes.guess_type(filename)[0]
            headers = mimetools.Message(StringIO(
                'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
                (mtype or 'text/plain', size, modified)))
            if host:
                host, port = splitport(host)
            if not host or \
                (not port and _safe_gethostbyname(host) in self.get_names()):
                if host:
                    origurl = 'file://' + host + filename
                else:
                    origurl = 'file://' + filename
                return addinfourl(open(localfile, 'rb'), headers, origurl)
        except OSError, msg:
            # urllib2 users shouldn't expect OSErrors coming from urlopen()
            raise URLError(msg)
        raise URLError('file not on local host')

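# Sketch: a local file URL is opened directly (hypothetical path):
#
#     resp = urlopen('file:///tmp/example.txt')
#     resp.info()['Content-type']   # guessed from the filename
#     data = resp.read()
#
# URLs naming a non-local host are retried over FTP (see file_open above).
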
class FTPHandler(BaseHandler):
    def ftp_open(self, req):
        import ftplib
        import mimetypes
        host = req.get_host()
        if not host:
            raise URLError('ftp error: no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT
        else:
            port = int(port)

        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = user or ''
        passwd = passwd or ''

        try:
            host = socket.gethostbyname(host)
        except socket.error, msg:
            raise URLError(msg)
        path, attrs = splitattr(req.get_selector())
        dirs = path.split('/')
        dirs = map(unquote, dirs)
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitvalue(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()
            fp, retrlen = fw.retrfile(file, type)
            headers = ""
            mtype = mimetypes.guess_type(req.get_full_url())[0]
            if mtype:
                headers += "Content-type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-length: %d\n" % retrlen
            sf = StringIO(headers)
            headers = mimetools.Message(sf)
            return addinfourl(fp, headers, req.get_full_url())
        except ftplib.all_errors, msg:
            raise URLError, ('ftp error: %s' % msg), sys.exc_info()[2]

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        fw = ftpwrapper(user, passwd, host, port, dirs, timeout,
                        persistent=False)
##        fw.ftp.set_debuglevel(1)
        return fw

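# Sketch: a ";type=" attribute on the path selects the transfer mode
# (hypothetical host):
#
#     urlopen('ftp://ftp.example.com/pub/file.txt;type=a')
#
# requests an ASCII ('A') transfer; the default is binary ('I') for
# files and a directory listing ('D') when the path ends in '/'.
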
class CacheFTPHandler(FTPHandler):
    # XXX would be nice to have pluggable cache strategies
    # XXX this stuff is definitely not thread safe
    def __init__(self):
        self.cache = {}
        self.timeout = {}
        self.soonest = 0
        self.delay = 60
        self.max_conns = 16

    def setTimeout(self, t):
        self.delay = t

    def setMaxConns(self, m):
        self.max_conns = m

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        key = user, host, port, '/'.join(dirs), timeout
        if key in self.cache:
            self.timeout[key] = time.time() + self.delay
        else:
            self.cache[key] = ftpwrapper(user, passwd, host, port, dirs, timeout)
            self.timeout[key] = time.time() + self.delay
        self.check_cache()
        return self.cache[key]

    def check_cache(self):
        # first check for old ones
        t = time.time()
        if self.soonest <= t:
            for k, v in self.timeout.items():
                if v < t:
                    self.cache[k].close()
                    del self.cache[k]
                    del self.timeout[k]
            self.soonest = min(self.timeout.values())

        # then check the size
        if len(self.cache) == self.max_conns:
            for k, v in self.timeout.items():
                if v == self.soonest:
                    del self.cache[k]
                    del self.timeout[k]
                    break
            self.soonest = min(self.timeout.values())

    def clear_cache(self):
        for conn in self.cache.values():
            conn.close()
        self.cache.clear()
        self.timeout.clear()
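
# Sketch: cache and reuse FTP connections (hypothetical host):
#
#     handler = CacheFTPHandler()
#     handler.setTimeout(30)    # close idle connections after 30 seconds
#     handler.setMaxConns(4)
#     opener = build_opener(handler)
#     opener.open('ftp://ftp.example.com/pub/a.txt')
#     opener.open('ftp://ftp.example.com/pub/b.txt')  # same connection key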