"""An extensible library for opening URLs using a variety of protocols

The simplest way to use this module is to call the urlopen function,
which accepts a string containing a URL or a Request object (described
below).  It opens the URL and returns the results as a file-like
object; the returned object has some extra methods described below.

The OpenerDirector manages a collection of Handler objects that do
all the actual work.  Each Handler implements a particular protocol or
option.  The OpenerDirector is a composite object that invokes the
Handlers needed to open the requested URL.  For example, the
HTTPHandler performs HTTP GET and POST requests and deals with
non-error returns.  The HTTPRedirectHandler automatically deals with
HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
deals with digest authentication.

urlopen(url, data=None) -- Basic usage is the same as the original
urllib.  Pass the URL and optionally data to POST to an HTTP URL, and
get a file-like object back.  One difference is that you can also pass
a Request instance instead of a URL.  Raises a URLError (subclass of
OSError); for HTTP errors, raises an HTTPError, which can also be
treated as a valid response.

build_opener -- Function that creates a new OpenerDirector instance.
Will install the default handlers.  Accepts one or more Handlers as
arguments, either instances or Handler classes that it will
instantiate.  If one of the arguments is a subclass of a default
handler, that argument will be installed instead of the default.

install_opener -- Installs a new opener as the default opener.

objects of interest:

OpenerDirector -- Sets up the User-Agent as the Python-urllib client and
manages the Handler classes, while dealing with requests and responses.

Request -- An object that encapsulates the state of a request.  The
state can be as simple as the URL.  It can also include extra HTTP
headers, e.g. a User-Agent.

BaseHandler -- The base class for all Handler objects.

internals:
BaseHandler and parent
_call_chain conventions

Example usage:

import urllib.request

# set up authentication info
authinfo = urllib.request.HTTPBasicAuthHandler()
authinfo.add_password(realm='PDQ Application',
                      uri='https://mahler:8092/site-updates.py',
                      user='klem',
                      passwd='geheim$parole')

proxy_support = urllib.request.ProxyHandler({"http": "http://ahad-haam:3128"})

# build a new opener that adds authentication and caching FTP handlers
opener = urllib.request.build_opener(proxy_support, authinfo,
                                     urllib.request.CacheFTPHandler)

# install it
urllib.request.install_opener(opener)

f = urllib.request.urlopen('http://www.python.org/')
"""

# XXX issues:
# If an authentication error handler tries to perform authentication
# but fails, how should the error be signalled?  The client needs to
# know the HTTP error code.  But if the handler knows that the problem
# was, e.g., that it didn't know the hash algorithm requested in the
# challenge, it would be good to pass that information along to the
# client, too.
# ftp errors aren't handled cleanly
# check digest against correct (i.e. non-apache) implementation

# Possible extensions:
# complex proxies  XXX not sure what exactly was meant by this
# abstract factory for opener

import base64
import bisect
import email
import hashlib
import http.client
import io
import os
import posixpath
import re
import socket
import sys
import time
import collections
import tempfile
import contextlib
import warnings


from urllib.error import URLError, HTTPError, ContentTooShortError
from urllib.parse import (
    urlparse, urlsplit, urljoin, unwrap, quote, unquote,
    splittype, splithost, splitport, splituser, splitpasswd,
    splitattr, splitquery, splitvalue, splittag, to_bytes,
    unquote_to_bytes, urlunparse)
from urllib.response import addinfourl, addclosehook

# check for SSL
try:
    import ssl
except ImportError:
    _have_ssl = False
else:
    _have_ssl = True

__all__ = [
    # Classes
    'Request', 'OpenerDirector', 'BaseHandler', 'HTTPDefaultErrorHandler',
    'HTTPRedirectHandler', 'HTTPCookieProcessor', 'ProxyHandler',
    'HTTPPasswordMgr', 'HTTPPasswordMgrWithDefaultRealm',
    'AbstractBasicAuthHandler', 'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler',
    'AbstractDigestAuthHandler', 'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler',
    'HTTPHandler', 'FileHandler', 'FTPHandler', 'CacheFTPHandler', 'DataHandler',
    'UnknownHandler', 'HTTPErrorProcessor',
    # Functions
    'urlopen', 'install_opener', 'build_opener',
    'pathname2url', 'url2pathname', 'getproxies',
    # Legacy interface
    'urlretrieve', 'urlcleanup', 'URLopener', 'FancyURLopener',
]

# used in User-Agent header sent
__version__ = sys.version[:3]

_opener = None
def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
            *, cafile=None, capath=None, cadefault=False):
    global _opener
    if cafile or capath or cadefault:
        if not _have_ssl:
            raise ValueError('SSL support not available')
        context = ssl._create_stdlib_context(cert_reqs=ssl.CERT_REQUIRED,
                                             cafile=cafile,
                                             capath=capath)
        https_handler = HTTPSHandler(context=context, check_hostname=True)
        opener = build_opener(https_handler)
    elif _opener is None:
        _opener = opener = build_opener()
    else:
        opener = _opener
    return opener.open(url, data, timeout)

def install_opener(opener):
    global _opener
    _opener = opener
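
# A minimal usage sketch for urlopen (assumptions: network access, and the
# URLs below are just examples).  Note the POST body must be bytes, not str:
#
#   import urllib.parse, urllib.request
#   with urllib.request.urlopen('http://www.python.org/', timeout=10) as f:
#       page = f.read()
#   # POSTing form data:
#   body = urllib.parse.urlencode({'q': 'python'}).encode('ascii')
#   f = urllib.request.urlopen('http://www.example.com/search', data=body)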


_url_tempfiles = []
def urlretrieve(url, filename=None, reporthook=None, data=None):
    """
    Retrieve a URL into a temporary location on disk.

    Requires a URL argument.  If a filename is passed, it is used as
    the temporary file location.  The reporthook argument should be
    a callable that accepts a block number, a read size, and the
    total file size of the URL target.  The data argument should be
    valid URL encoded data.

    If a filename is passed and the URL points to a local resource,
    the result is a copy from local file to new file.

    Returns a tuple containing the path to the newly created
    data file as well as the resulting HTTPMessage object.
    """
    url_type, path = splittype(url)

    with contextlib.closing(urlopen(url, data)) as fp:
        headers = fp.info()

        # Just return the local path and the "headers" for file://
        # URLs. No sense in performing a copy unless requested.
        if url_type == "file" and not filename:
            return os.path.normpath(path), headers

        # Handle temporary file setup.
        if filename:
            tfp = open(filename, 'wb')
        else:
            tfp = tempfile.NamedTemporaryFile(delete=False)
            filename = tfp.name
            _url_tempfiles.append(filename)

        with tfp:
            result = filename, headers
            bs = 1024*8
            size = -1
            read = 0
            blocknum = 0
            if "content-length" in headers:
                size = int(headers["Content-Length"])

            if reporthook:
                reporthook(blocknum, bs, size)

            while True:
                block = fp.read(bs)
                if not block:
                    break
                read += len(block)
                tfp.write(block)
                blocknum += 1
                if reporthook:
                    reporthook(blocknum, bs, size)

    if size >= 0 and read < size:
        raise ContentTooShortError(
            "retrieval incomplete: got only %i out of %i bytes"
            % (read, size), result)

    return result
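
# A small reporthook sketch (the URL is a placeholder).  urlretrieve calls
# the hook as reporthook(block_number, block_size, total_size):
#
#   def progress(blocknum, bs, size):
#       if size > 0:
#           print("%.1f%%" % min(100.0, blocknum * bs * 100.0 / size))
#
#   path, headers = urlretrieve('http://www.python.org/', reporthook=progress)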


def urlcleanup():
    for temp_file in _url_tempfiles:
        try:
            os.unlink(temp_file)
        except OSError:
            pass

    del _url_tempfiles[:]
    global _opener
    if _opener:
        _opener = None

# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$", re.ASCII)
def request_host(request):
    """Return request-host, as defined by RFC 2965.

    Variation from RFC: returned value is lowercased, for convenient
    comparison.

    """
    url = request.full_url
    host = urlparse(url)[1]
    if host == "":
        host = request.get_header("Host", "")

    # remove port, if present
    host = _cut_port_re.sub("", host, 1)
    return host.lower()

class Request:

    def __init__(self, url, data=None, headers={},
                 origin_req_host=None, unverifiable=False,
                 method=None):
        self.full_url = url
        self.headers = {}
        self.unredirected_hdrs = {}
        self._data = None
        self.data = data
        self._tunnel_host = None
        for key, value in headers.items():
            self.add_header(key, value)
        if origin_req_host is None:
            origin_req_host = request_host(self)
        self.origin_req_host = origin_req_host
        self.unverifiable = unverifiable
        if method:
            self.method = method

    @property
    def full_url(self):
        if self.fragment:
            return '{}#{}'.format(self._full_url, self.fragment)
        return self._full_url

    @full_url.setter
    def full_url(self, url):
        # unwrap('<URL:type://host/path>') --> 'type://host/path'
        self._full_url = unwrap(url)
        self._full_url, self.fragment = splittag(self._full_url)
        self._parse()

    @full_url.deleter
    def full_url(self):
        self._full_url = None
        self.fragment = None
        self.selector = ''

    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, data):
        if data != self._data:
            self._data = data
            # issue 16464
            # if we change data we need to remove the content-length header
            # (because it was most probably calculated for the previous value)
            if self.has_header("Content-length"):
                self.remove_header("Content-length")

    @data.deleter
    def data(self):
        self.data = None

    def _parse(self):
        self.type, rest = splittype(self._full_url)
        if self.type is None:
            raise ValueError("unknown url type: %r" % self.full_url)
        self.host, self.selector = splithost(rest)
        if self.host:
            self.host = unquote(self.host)

    def get_method(self):
        """Return a string indicating the HTTP request method."""
        default_method = "POST" if self.data is not None else "GET"
        return getattr(self, 'method', default_method)

    def get_full_url(self):
        return self.full_url

    def set_proxy(self, host, type):
        if self.type == 'https' and not self._tunnel_host:
            self._tunnel_host = self.host
        else:
            self.type = type
            self.selector = self.full_url
        self.host = host

    def has_proxy(self):
        return self.selector == self.full_url

    def add_header(self, key, val):
        # useful for something like authentication
        self.headers[key.capitalize()] = val

    def add_unredirected_header(self, key, val):
        # will not be added to a redirected request
        self.unredirected_hdrs[key.capitalize()] = val

    def has_header(self, header_name):
        return (header_name in self.headers or
                header_name in self.unredirected_hdrs)

    def get_header(self, header_name, default=None):
        return self.headers.get(
            header_name,
            self.unredirected_hdrs.get(header_name, default))

    def remove_header(self, header_name):
        self.headers.pop(header_name, None)
        self.unredirected_hdrs.pop(header_name, None)

    def header_items(self):
        hdrs = self.unredirected_hdrs.copy()
        hdrs.update(self.headers)
        return list(hdrs.items())
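
# A brief construction sketch (the URL and header values are placeholders):
#
#   req = Request('http://www.example.com/api',
#                 data=b'key=value',
#                 headers={'User-Agent': 'my-client/1.0'})
#   req.get_method()          # -> 'POST', because data is set
#   req.add_header('Accept', 'text/html')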


class OpenerDirector:
    def __init__(self):
        client_version = "Python-urllib/%s" % __version__
        self.addheaders = [('User-agent', client_version)]
        # self.handlers is retained only for backward compatibility
        self.handlers = []
        # manage the individual handlers
        self.handle_open = {}
        self.handle_error = {}
        self.process_response = {}
        self.process_request = {}

    def add_handler(self, handler):
        if not hasattr(handler, "add_parent"):
            raise TypeError("expected BaseHandler instance, got %r" %
                            type(handler))

        added = False
        for meth in dir(handler):
            if meth in ["redirect_request", "do_open", "proxy_open"]:
                # oops, coincidental match
                continue

            i = meth.find("_")
            protocol = meth[:i]
            condition = meth[i+1:]

            if condition.startswith("error"):
                j = condition.find("_") + i + 1
                kind = meth[j+1:]
                try:
                    kind = int(kind)
                except ValueError:
                    pass
                lookup = self.handle_error.get(protocol, {})
                self.handle_error[protocol] = lookup
            elif condition == "open":
                kind = protocol
                lookup = self.handle_open
            elif condition == "response":
                kind = protocol
                lookup = self.process_response
            elif condition == "request":
                kind = protocol
                lookup = self.process_request
            else:
                continue

            handlers = lookup.setdefault(kind, [])
            if handlers:
                bisect.insort(handlers, handler)
            else:
                handlers.append(handler)
            added = True

        if added:
            bisect.insort(self.handlers, handler)
            handler.add_parent(self)

    def close(self):
        # Only exists for backwards compatibility.
        pass

    def _call_chain(self, chain, kind, meth_name, *args):
        # Handlers raise an exception if no one else should try to handle
        # the request, or return None if they can't but another handler
        # could.  Otherwise, they return the response.
        handlers = chain.get(kind, ())
        for handler in handlers:
            func = getattr(handler, meth_name)
            result = func(*args)
            if result is not None:
                return result

    def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
        # accept a URL or a Request object
        if isinstance(fullurl, str):
            req = Request(fullurl, data)
        else:
            req = fullurl
            if data is not None:
                req.data = data

        req.timeout = timeout
        protocol = req.type

        # pre-process request
        meth_name = protocol+"_request"
        for processor in self.process_request.get(protocol, []):
            meth = getattr(processor, meth_name)
            req = meth(req)

        response = self._open(req, data)

        # post-process response
        meth_name = protocol+"_response"
        for processor in self.process_response.get(protocol, []):
            meth = getattr(processor, meth_name)
            response = meth(req, response)

        return response

    def _open(self, req, data=None):
        result = self._call_chain(self.handle_open, 'default',
                                  'default_open', req)
        if result:
            return result

        protocol = req.type
        result = self._call_chain(self.handle_open, protocol, protocol +
                                  '_open', req)
        if result:
            return result

        return self._call_chain(self.handle_open, 'unknown',
                                'unknown_open', req)

    def error(self, proto, *args):
        if proto in ('http', 'https'):
            # XXX http[s] protocols are special-cased
            dict = self.handle_error['http'] # https is not different than http
            proto = args[2]  # YUCK!
            meth_name = 'http_error_%s' % proto
            http_err = 1
            orig_args = args
        else:
            dict = self.handle_error
            meth_name = proto + '_error'
            http_err = 0
        args = (dict, proto, meth_name) + args
        result = self._call_chain(*args)
        if result:
            return result

        if http_err:
            args = (dict, 'default', 'http_error_default') + orig_args
            return self._call_chain(*args)
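
# add_handler discovers a handler's capabilities by method name:
# "<protocol>_open", "<protocol>_request"/"<protocol>_response" (processors),
# and "<protocol>_error_<code>".  A minimal sketch of a custom handler that
# logs every HTTP request before it is sent (the class name is ours, not part
# of the library):
#
#   class LoggingHandler(BaseHandler):
#       def http_request(self, req):
#           print("about to fetch", req.full_url)
#           return req
#
#   opener = build_opener(LoggingHandler)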

# XXX probably also want an abstract factory that knows when it makes
# sense to skip a superclass in favor of a subclass and when it might
# make sense to include both

def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP, FTP and, when applicable, HTTPS.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    def isclass(obj):
        return isinstance(obj, type) or hasattr(obj, "__bases__")

    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor,
                       DataHandler]
    if hasattr(http.client, "HTTPSConnection"):
        default_classes.append(HTTPSHandler)
    skip = set()
    for klass in default_classes:
        for check in handlers:
            if isclass(check):
                if issubclass(check, klass):
                    skip.add(klass)
            elif isinstance(check, klass):
                skip.add(klass)
    for klass in skip:
        default_classes.remove(klass)

    for klass in default_classes:
        opener.add_handler(klass())

    for h in handlers:
        if isclass(h):
            h = h()
        opener.add_handler(h)
    return opener
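
# For example (a sketch): passing ProxyHandler({}) disables any proxies
# picked up from the environment, because the instance replaces the default
# ProxyHandler:
#
#   opener = build_opener(ProxyHandler({}))
#   install_opener(opener)          # make it the default for urlopen()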


class BaseHandler:
    handler_order = 500

    def add_parent(self, parent):
        self.parent = parent

    def close(self):
        # Only exists for backwards compatibility
        pass

    def __lt__(self, other):
        if not hasattr(other, "handler_order"):
            # Try to preserve the old behavior of having custom classes
            # inserted after default ones (works only for custom user
            # classes which are not aware of handler_order).
            return True
        return self.handler_order < other.handler_order


class HTTPErrorProcessor(BaseHandler):
    """Process HTTP error responses."""
    handler_order = 1000  # after all other processing

    def http_response(self, request, response):
        code, msg, hdrs = response.code, response.msg, response.info()

        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        if not (200 <= code < 300):
            response = self.parent.error(
                'http', request, response, code, msg, hdrs)

        return response

    https_response = http_response


class HTTPDefaultErrorHandler(BaseHandler):
    def http_error_default(self, req, fp, code, msg, hdrs):
        raise HTTPError(req.full_url, code, msg, hdrs, fp)

class HTTPRedirectHandler(BaseHandler):
    # maximum number of redirections to any single URL
    # this is needed because of the state that cookies introduce
    max_repeats = 4
    # maximum total number of redirections (regardless of URL) before
    # assuming we're in a loop
    max_redirections = 10

    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.

        This is called by the http_error_30x methods when a
        redirection response is received.  If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect.  Otherwise, raise HTTPError if no-one
        else should try to handle this url.  Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        if (not (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
            or code in (301, 302, 303) and m == "POST")):
            raise HTTPError(req.full_url, code, msg, headers, fp)

        # Strictly (according to RFC 2616), 301 or 302 in response to
        # a POST MUST NOT cause a redirection without confirmation
        # from the user (of urllib.request, in this case).  In practice,
        # essentially all clients do redirect in this case, so we do
        # the same.
        # Be lenient with URIs containing a space.
        newurl = newurl.replace(' ', '%20')
        CONTENT_HEADERS = ("content-length", "content-type")
        newheaders = dict((k, v) for k, v in req.headers.items()
                          if k.lower() not in CONTENT_HEADERS)
        return Request(newurl,
                       headers=newheaders,
                       origin_req_host=req.origin_req_host,
                       unverifiable=True)

    # Implementation note: To avoid the server sending us into an
    # infinite loop, the request object needs to track what URLs we
    # have already seen.  Do this by adding a handler-specific
    # attribute to the Request object.
    def http_error_302(self, req, fp, code, msg, headers):
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI).  Use first header.
        if "location" in headers:
            newurl = headers["location"]
        elif "uri" in headers:
            newurl = headers["uri"]
        else:
            return

        # fix a possible malformed URL
        urlparts = urlparse(newurl)

        # For security reasons we don't allow redirection to anything other
        # than http, https or ftp.

        if urlparts.scheme not in ('http', 'https', 'ftp', ''):
            raise HTTPError(
                newurl, code,
                "%s - Redirection to url '%s' is not allowed" % (msg, newurl),
                headers, fp)

        if not urlparts.path:
            urlparts = list(urlparts)
            urlparts[2] = "/"
        newurl = urlunparse(urlparts)

        newurl = urljoin(req.full_url, newurl)

        # XXX Probably want to forget about the state of the current
        # request, although that might interact poorly with other
        # handlers that also use handler-specific request attributes
        new = self.redirect_request(req, fp, code, msg, headers, newurl)
        if new is None:
            return

        # loop detection
        # .redirect_dict has a key url if url was previously visited.
        if hasattr(req, 'redirect_dict'):
            visited = new.redirect_dict = req.redirect_dict
            if (visited.get(newurl, 0) >= self.max_repeats or
                len(visited) >= self.max_redirections):
                raise HTTPError(req.full_url, code,
                                self.inf_msg + msg, headers, fp)
        else:
            visited = new.redirect_dict = req.redirect_dict = {}
        visited[newurl] = visited.get(newurl, 0) + 1

        # Don't close the fp until we are sure that we won't use it
        # with HTTPError.
        fp.read()
        fp.close()

        return self.parent.open(new, timeout=req.timeout)

    http_error_301 = http_error_303 = http_error_307 = http_error_302

    inf_msg = "The HTTP server returned a redirect error that would " \
              "lead to an infinite loop.\n" \
              "The last 30x error message was:\n"
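
# A subclassing sketch (the class name is ours): redirect_request is the
# intended override point, so an opener that refuses to follow redirects
# can be built as:
#
#   class NoRedirectHandler(HTTPRedirectHandler):
#       def redirect_request(self, req, fp, code, msg, headers, newurl):
#           raise HTTPError(req.full_url, code, msg, headers, fp)
#
#   opener = build_opener(NoRedirectHandler)  # replaces the default handler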


def _parse_proxy(proxy):
    """Return (scheme, user, password, host/port) given a URL or an authority.

    If a URL is supplied, it must have an authority (host:port) component.
    According to RFC 3986, having an authority component means the URL must
    have two slashes after the scheme:

    >>> _parse_proxy('file:/ftp.example.com/')
    Traceback (most recent call last):
    ValueError: proxy URL with no authority: 'file:/ftp.example.com/'

    The first three items of the returned tuple may be None.

    Examples of authority parsing:

    >>> _parse_proxy('proxy.example.com')
    (None, None, None, 'proxy.example.com')
    >>> _parse_proxy('proxy.example.com:3128')
    (None, None, None, 'proxy.example.com:3128')

    The authority component may optionally include userinfo (assumed to be
    username:password):

    >>> _parse_proxy('joe:password@proxy.example.com')
    (None, 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('joe:password@proxy.example.com:3128')
    (None, 'joe', 'password', 'proxy.example.com:3128')

    Same examples, but with URLs instead:

    >>> _parse_proxy('http://proxy.example.com/')
    ('http', None, None, 'proxy.example.com')
    >>> _parse_proxy('http://proxy.example.com:3128/')
    ('http', None, None, 'proxy.example.com:3128')
    >>> _parse_proxy('http://joe:password@proxy.example.com/')
    ('http', 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('http://joe:password@proxy.example.com:3128')
    ('http', 'joe', 'password', 'proxy.example.com:3128')

    Everything after the authority is ignored:

    >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
    ('ftp', 'joe', 'password', 'proxy.example.com')

    Test for no trailing '/' case:

    >>> _parse_proxy('http://joe:password@proxy.example.com')
    ('http', 'joe', 'password', 'proxy.example.com')

    """
    scheme, r_scheme = splittype(proxy)
    if not r_scheme.startswith("/"):
        # authority
        scheme = None
        authority = proxy
    else:
        # URL
        if not r_scheme.startswith("//"):
            raise ValueError("proxy URL with no authority: %r" % proxy)
        # We have an authority, so for RFC 3986-compliant URLs (by ss 3.
        # and 3.3.), path is empty or starts with '/'
        end = r_scheme.find("/", 2)
        if end == -1:
            end = None
        authority = r_scheme[2:end]
    userinfo, hostport = splituser(authority)
    if userinfo is not None:
        user, password = splitpasswd(userinfo)
    else:
        user = password = None
    return scheme, user, password, hostport


class ProxyHandler(BaseHandler):
    # Proxies must be in front
    handler_order = 100

    def __init__(self, proxies=None):
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'keys'), "proxies must be a mapping"
        self.proxies = proxies
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open:
                        meth(r, proxy, type))

    def proxy_open(self, req, proxy, type):
        orig_type = req.type
        proxy_type, user, password, hostport = _parse_proxy(proxy)
        if proxy_type is None:
            proxy_type = orig_type

        if req.host and proxy_bypass(req.host):
            return None

        if user and password:
            user_pass = '%s:%s' % (unquote(user),
                                   unquote(password))
            creds = base64.b64encode(user_pass.encode()).decode("ascii")
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        hostport = unquote(hostport)
        req.set_proxy(hostport, proxy_type)
        if orig_type == proxy_type or orig_type == 'https':
            # let other handlers take care of it
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            # e.g. if we have a constructor arg proxies like so:
            # {'http': 'ftp://proxy.example.com'}, we may end up turning
            # a request for http://acme.example.com/a into one for
            # ftp://proxy.example.com/a
            return self.parent.open(req, timeout=req.timeout)
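
# A configuration sketch (host names are placeholders): an explicit mapping
# routes each scheme through its proxy, and credentials can be embedded in
# the proxy URL itself:
#
#   handler = ProxyHandler({'http': 'http://joe:password@proxy.example.com:3128',
#                           'https': 'http://proxy.example.com:3128'})
#   opener = build_opener(handler)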


class HTTPPasswordMgr:

    def __init__(self):
        self.passwd = {}

    def add_password(self, realm, uri, user, passwd):
        # uri could be a single URI or a sequence
        if isinstance(uri, str):
            uri = [uri]
        if realm not in self.passwd:
            self.passwd[realm] = {}
        for default_port in True, False:
            reduced_uri = tuple(
                [self.reduce_uri(u, default_port) for u in uri])
            self.passwd[realm][reduced_uri] = (user, passwd)

    def find_user_password(self, realm, authuri):
        domains = self.passwd.get(realm, {})
        for default_port in True, False:
            reduced_authuri = self.reduce_uri(authuri, default_port)
            for uris, authinfo in domains.items():
                for uri in uris:
                    if self.is_suburi(uri, reduced_authuri):
                        return authinfo
        return None, None

    def reduce_uri(self, uri, default_port=True):
        """Accept authority or URI and extract only the authority and path."""
        # note HTTP URLs do not have a userinfo component
        parts = urlsplit(uri)
        if parts[1]:
            # URI
            scheme = parts[0]
            authority = parts[1]
            path = parts[2] or '/'
        else:
            # host or host:port
            scheme = None
            authority = uri
            path = '/'
        host, port = splitport(authority)
        if default_port and port is None and scheme is not None:
            dport = {"http": 80,
                     "https": 443,
                     }.get(scheme)
            if dport is not None:
                authority = "%s:%d" % (host, dport)
        return authority, path

    def is_suburi(self, base, test):
        """Check if test is below base in a URI tree

        Both args must be URIs in reduced form.
        """
        if base == test:
            return True
        if base[0] != test[0]:
            return False
        common = posixpath.commonprefix((base[1], test[1]))
        if len(common) == len(base[1]):
            return True
        return False


class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):

    def find_user_password(self, realm, authuri):
        user, password = HTTPPasswordMgr.find_user_password(self, realm,
                                                            authuri)
        if user is not None:
            return user, password
        return HTTPPasswordMgr.find_user_password(self, None, authuri)

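
# A usage sketch (realm, URL and credentials are placeholders): passing None
# as the realm registers a fallback that matches any realm for the given URI
# prefix:
#
#   mgr = HTTPPasswordMgrWithDefaultRealm()
#   mgr.add_password(None, 'http://www.example.com/private/',
#                    'klem', 'geheim$parole')
#   opener = build_opener(HTTPBasicAuthHandler(mgr))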

class AbstractBasicAuthHandler:

    # XXX this allows for multiple auth-schemes, but will stupidly pick
    # the last one with a realm specified.

    # allow for double- and single-quoted realm values
    # (single quotes are a violation of the RFC, but appear in the wild)
    rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
                    'realm=(["\']?)([^"\']*)\\2', re.I)

    # XXX could pre-emptively send auth info already accepted (RFC 2617,
    # end of section 2, and section 1.2 immediately after "credentials"
    # production).

    def __init__(self, password_mgr=None):
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        self.add_password = self.passwd.add_password
        self.retried = 0

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, authreq, host, req, headers):
        # host may be an authority (without userinfo) or a URL with an
        # authority
        # XXX could be multiple headers
        authreq = headers.get(authreq, None)

        if self.retried > 5:
            # retry sending the username:password 5 times before failing.
            raise HTTPError(req.get_full_url(), 401, "basic auth failed",
                            headers, None)
        else:
            self.retried += 1

        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() != 'basic':
                raise ValueError("AbstractBasicAuthHandler does not"
                                 " support the following scheme: '%s'" %
                                 scheme)
            else:
                mo = AbstractBasicAuthHandler.rx.search(authreq)
                if mo:
                    scheme, quote, realm = mo.groups()
                    if quote not in ['"', "'"]:
                        warnings.warn("Basic Auth Realm was unquoted",
                                      UserWarning, 2)
                    if scheme.lower() == 'basic':
                        response = self.retry_http_basic_auth(host, req, realm)
                        if response and response.code != 401:
                            self.retried = 0
                        return response

    def retry_http_basic_auth(self, host, req, realm):
        user, pw = self.passwd.find_user_password(realm, host)
        if pw is not None:
            raw = "%s:%s" % (user, pw)
            auth = "Basic " + base64.b64encode(raw.encode()).decode("ascii")
            if req.headers.get(self.auth_header, None) == auth:
                return None
            req.add_unredirected_header(self.auth_header, auth)
            return self.parent.open(req, timeout=req.timeout)
        else:
            return None


class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        url = req.full_url
        response = self.http_error_auth_reqed('www-authenticate',
                                              url, req, headers)
        self.reset_retry_count()
        return response


class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):

    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # http_error_auth_reqed requires that there is no userinfo component in
        # authority.  Assume there isn't one, since urllib.request does not (and
        # should not, RFC 3986 s. 3.2.1) support requests for URLs containing
        # userinfo.
        authority = req.host
        response = self.http_error_auth_reqed('proxy-authenticate',
                                              authority, req, headers)
        self.reset_retry_count()
        return response

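
# The credential these handlers send is just RFC 2617 Basic: base64 of
# "user:password" in the auth_header.  A worked sketch (credentials borrowed
# from the module docstring example):
#
#   raw = "klem:geheim$parole"
#   base64.b64encode(raw.encode()).decode("ascii")
#   # -> 'a2xlbTpnZWhlaW0kcGFyb2xl'
#   # header sent: Authorization: Basic a2xlbTpnZWhlaW0kcGFyb2xl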

# Return n random bytes.
_randombytes = os.urandom


class AbstractDigestAuthHandler:
    # Digest authentication is specified in RFC 2617.

    # XXX The client does not inspect the Authentication-Info header
    # in a successful response.

    # XXX It should be possible to test this implementation against
    # a mock server that just generates a static set of challenges.

    # XXX Support for qop="auth-int" is shaky.

    def __init__(self, passwd=None):
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        self.add_password = self.passwd.add_password
        self.retried = 0
        self.nonce_count = 0
        self.last_nonce = None

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        authreq = headers.get(auth_header, None)
        if self.retried > 5:
            # Don't fail endlessly - if we failed once, we'll probably
            # fail a second time.  Hm.  Unless the Password Manager is
            # prompting for the information.  Crap.  This isn't great
            # but it's better than the current 'repeat until recursion
            # depth exceeded' approach <wink>
            raise HTTPError(req.full_url, 401, "digest auth failed",
                            headers, None)
        else:
            self.retried += 1
        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() == 'digest':
                return self.retry_http_digest_auth(req, authreq)
            elif scheme.lower() != 'basic':
                raise ValueError("AbstractDigestAuthHandler does not support"
                                 " the following scheme: '%s'" % scheme)

    def retry_http_digest_auth(self, req, auth):
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(filter(None, parse_http_list(challenge)))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            if req.headers.get(self.auth_header, None) == auth_val:
                return None
            req.add_unredirected_header(self.auth_header, auth_val)
            resp = self.parent.open(req, timeout=req.timeout)
            return resp

    def get_cnonce(self, nonce):
        # The cnonce-value is an opaque
        # quoted string value provided by the client and used by both client
        # and server to avoid chosen plaintext attacks, to provide mutual
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        s = "%s:%s:%s:" % (self.nonce_count, nonce, time.ctime())
        b = s.encode("ascii") + _randombytes(8)
        dig = hashlib.sha1(b).hexdigest()
        return dig[:16]

    def get_authorization(self, req, chal):
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None

        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            return None

        user, pw = self.passwd.find_user_password(realm, req.full_url)
        if user is None:
            return None

        # XXX not implemented yet
        if req.data is not None:
            entdig = self.get_entity_digest(req.data, chal)
        else:
            entdig = None

        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.get_method(),
                        # XXX selector: what about proxies and full urls
                        req.selector)
        if qop == 'auth':
            if nonce == self.last_nonce:
                self.nonce_count += 1
            else:
                self.nonce_count = 1
                self.last_nonce = nonce
            ncvalue = '%08x' % self.nonce_count
            cnonce = self.get_cnonce(nonce)
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
            respdig = KD(H(A1), noncebit)
        elif qop is None:
            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        else:
            # XXX handle auth-int.
            raise URLError("qop '%s' is not supported." % qop)

        # XXX should the partial digests be encoded too?

        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.selector,
                                  respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if entdig:
            base += ', digest="%s"' % entdig
        base += ', algorithm="%s"' % algorithm
        if qop:
            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return base

    def get_algorithm_impls(self, algorithm):
        # lambdas assume digest modules are imported at the top level
        if algorithm == 'MD5':
            H = lambda x: hashlib.md5(x.encode("ascii")).hexdigest()
        elif algorithm == 'SHA':
            H = lambda x: hashlib.sha1(x.encode("ascii")).hexdigest()
        else:
            # Without this branch, an unknown algorithm would leave H
            # unbound and crash with a NameError below.
            raise ValueError("Unsupported digest authentication "
                             "algorithm %r" % algorithm)
        # XXX MD5-sess
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD

    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None

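
# The qop="auth" response computed above follows RFC 2617, with
# H = hex(hash) and KD(secret, data) = H(secret ":" data):
#
#   A1 = user ":" realm ":" password
#   A2 = method ":" request-uri
#   response = KD(H(A1), nonce ":" nc ":" cnonce ":" qop ":" H(A2))
#
# and, when the challenge carries no qop, the older RFC 2069 form:
#
#   response = KD(H(A1), nonce ":" H(A2))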

class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069

    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """

    auth_header = 'Authorization'
    handler_order = 490  # before Basic auth

    def http_error_401(self, req, fp, code, msg, headers):
        host = urlparse(req.full_url)[1]
        retry = self.http_error_auth_reqed('www-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry


class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):

    auth_header = 'Proxy-Authorization'
    handler_order = 490  # before Basic auth

    def http_error_407(self, req, fp, code, msg, headers):
        host = req.host
        retry = self.http_error_auth_reqed('proxy-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry


class AbstractHTTPHandler(BaseHandler):

    def __init__(self, debuglevel=0):
        self._debuglevel = debuglevel

    def set_http_debuglevel(self, level):
        self._debuglevel = level

    def do_request_(self, request):
        host = request.host
        if not host:
            raise URLError('no host given')

        if request.data is not None:  # POST
            data = request.data
            if isinstance(data, str):
                msg = "POST data should be bytes or an iterable of bytes. " \
                      "It cannot be of type str."
                raise TypeError(msg)
            if not request.has_header('Content-type'):
                request.add_unredirected_header(
                    'Content-type',
                    'application/x-www-form-urlencoded')
            if not request.has_header('Content-length'):
                try:
                    mv = memoryview(data)
                except TypeError:
                    if isinstance(data, collections.Iterable):
                        raise ValueError("Content-Length should be specified "
                                "for iterable data of type %r %r" % (type(data),
                                data))
                else:
                    request.add_unredirected_header(
                            'Content-length', '%d' % (len(mv) * mv.itemsize))

        sel_host = host
        if request.has_proxy():
            scheme, sel = splittype(request.selector)
            sel_host, sel_path = splithost(sel)
        if not request.has_header('Host'):
            request.add_unredirected_header('Host', sel_host)
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if not request.has_header(name):
                request.add_unredirected_header(name, value)

        return request

    def do_open(self, http_class, req, **http_conn_args):
        """Return an HTTPResponse object for the request, using http_class.

        http_class must implement the HTTPConnection API from http.client.
        """
        host = req.host
        if not host:
            raise URLError('no host given')

        # will parse host:port
        h = http_class(host, timeout=req.timeout, **http_conn_args)

        headers = dict(req.unredirected_hdrs)
        headers.update(dict((k, v) for k, v in req.headers.items()
                            if k not in headers))

        # TODO(jhylton): Should this be redesigned to handle
        # persistent connections?

        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = dict((name.title(), val) for name, val in headers.items())

        if req._tunnel_host:
            tunnel_headers = {}
            proxy_auth_hdr = "Proxy-Authorization"
            if proxy_auth_hdr in headers:
                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                # Proxy-Authorization should not be sent to origin
                # server.
                del headers[proxy_auth_hdr]
            h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

        try:
            h.request(req.get_method(), req.selector, req.data, headers)
        except OSError as err:  # timeout error
            h.close()
            raise URLError(err)
        else:
            r = h.getresponse()
            # If the server does not send us a 'Connection: close' header,
            # HTTPConnection assumes the socket should be left open. Manually
            # mark the socket to be closed when this response object goes away.
            if h.sock:
                h.sock.close()
                h.sock = None

        r.url = req.get_full_url()
        # This line replaces the .msg attribute of the HTTPResponse
        # with .headers, because urllib clients expect the response to
        # have the reason in .msg.  It would be good to mark this
        # attribute as deprecated and get them to use info() or
        # .headers.
        r.msg = r.reason
        return r


class HTTPHandler(AbstractHTTPHandler):

    def http_open(self, req):
        return self.do_open(http.client.HTTPConnection, req)

    http_request = AbstractHTTPHandler.do_request_

if hasattr(http.client, 'HTTPSConnection'):

    class HTTPSHandler(AbstractHTTPHandler):

        def __init__(self, debuglevel=0, context=None, check_hostname=None):
            AbstractHTTPHandler.__init__(self, debuglevel)
            self._context = context
            self._check_hostname = check_hostname

        def https_open(self, req):
            return self.do_open(http.client.HTTPSConnection, req,
                context=self._context, check_hostname=self._check_hostname)

        https_request = AbstractHTTPHandler.do_request_

    __all__.append('HTTPSHandler')
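
# A minimal TLS-configuration sketch (the CA path is a placeholder): an
# HTTPSHandler built around an explicit SSLContext verifies server
# certificates against the given bundle:
#
#   import ssl
#   ctx = ssl.create_default_context(cafile='/path/to/ca-bundle.pem')
#   opener = build_opener(HTTPSHandler(context=ctx))
#   f = opener.open('https://www.python.org/')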

class HTTPCookieProcessor(BaseHandler):
    def __init__(self, cookiejar=None):
        import http.cookiejar
        if cookiejar is None:
            cookiejar = http.cookiejar.CookieJar()
        self.cookiejar = cookiejar

    def http_request(self, request):
        self.cookiejar.add_cookie_header(request)
        return request

    def http_response(self, request, response):
        self.cookiejar.extract_cookies(response, request)
        return response

    https_request = http_request
    https_response = http_response
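
# A session sketch: sharing one CookieJar across requests makes the opener
# carry cookies between responses (URLs are placeholders):
#
#   import http.cookiejar
#   cj = http.cookiejar.CookieJar()
#   opener = build_opener(HTTPCookieProcessor(cj))
#   opener.open('http://www.example.com/login')
#   opener.open('http://www.example.com/private')  # sends cookies set above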

class UnknownHandler(BaseHandler):
    def unknown_open(self, req):
        type = req.type
        raise URLError('unknown url type: %s' % type)

def parse_keqv_list(l):
    """Parse list of key=value strings where keys are not duplicated."""
    parsed = {}
    for elt in l:
        k, v = elt.split('=', 1)
        if v[0] == '"' and v[-1] == '"':
            v = v[1:-1]
        parsed[k] = v
    return parsed

def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Neither commas nor quotes count if they are escaped.
    Only double-quotes count, not single-quotes.
    """
    res = []
    part = ''

    escape = quote = False
    for cur in s:
        if escape:
            part += cur
            escape = False
            continue
        if quote:
            if cur == '\\':
                escape = True
                continue
            elif cur == '"':
                quote = False
            part += cur
            continue

        if cur == ',':
            res.append(part)
            part = ''
            continue

        if cur == '"':
            quote = True

        part += cur

    # append last part
    if part:
        res.append(part)

    return [part.strip() for part in res]
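
# For example (sketches of the two parsers above; quoted elements keep
# their quotes in parse_http_list, while parse_keqv_list strips them):
#
#   >>> parse_http_list('a, "b, c", d')
#   ['a', '"b, c"', 'd']
#   >>> parse_keqv_list(['realm="example"', 'qop=auth'])
#   {'realm': 'example', 'qop': 'auth'}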

class FileHandler(BaseHandler):
    # Use local file or FTP depending on form of URL
    def file_open(self, req):
        url = req.selector
        if url[:2] == '//' and url[2:3] != '/' and (req.host and
                req.host != 'localhost'):
            if req.host not in self.get_names():
                raise URLError("file:// scheme is supported only on localhost")
        else:
            return self.open_local_file(req)

    # names for the localhost
    names = None
    def get_names(self):
        if FileHandler.names is None:
            try:
                FileHandler.names = tuple(
                    socket.gethostbyname_ex('localhost')[2] +
                    socket.gethostbyname_ex(socket.gethostname())[2])
            except socket.gaierror:
                FileHandler.names = (socket.gethostbyname('localhost'),)
        return FileHandler.names

|
|
# not entirely sure what the rules are here
|
|
|
|
def open_local_file(self, req):
|
|
|
|
import email.utils
|
|
|
|
import mimetypes
|
2009-03-31 11:35:53 -03:00
|
|
|
host = req.host
|
2010-05-08 02:12:05 -03:00
|
|
|
filename = req.selector
|
|
|
|
localfile = url2pathname(filename)
|
2008-06-18 17:49:58 -03:00
|
|
|
try:
|
|
|
|
stats = os.stat(localfile)
|
|
|
|
size = stats.st_size
|
|
|
|
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
|
2010-05-08 02:12:05 -03:00
|
|
|
mtype = mimetypes.guess_type(filename)[0]
|
2008-06-18 17:49:58 -03:00
|
|
|
headers = email.message_from_string(
|
|
|
|
'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
|
|
|
|
(mtype or 'text/plain', size, modified))
|
|
|
|
if host:
|
2008-07-01 16:56:00 -03:00
|
|
|
host, port = splitport(host)
|
2008-06-18 17:49:58 -03:00
|
|
|
if not host or \
|
|
|
|
(not port and _safe_gethostbyname(host) in self.get_names()):
|
2010-05-08 02:12:05 -03:00
|
|
|
if host:
|
|
|
|
origurl = 'file://' + host + filename
|
|
|
|
else:
|
|
|
|
origurl = 'file://' + filename
|
|
|
|
return addinfourl(open(localfile, 'rb'), headers, origurl)
|
2012-10-21 22:31:25 -03:00
|
|
|
except OSError as exp:
|
2008-06-23 08:44:14 -03:00
|
|
|
# users shouldn't expect OSErrors coming from urlopen()
|
2012-10-21 22:31:25 -03:00
|
|
|
raise URLError(exp)
|
2008-07-01 16:56:00 -03:00
|
|
|
raise URLError('file not on local host')
|
2008-06-18 17:49:58 -03:00
|
|
|
|
|
|
|
def _safe_gethostbyname(host):
|
|
|
|
try:
|
|
|
|
return socket.gethostbyname(host)
|
|
|
|
except socket.gaierror:
|
|
|
|
return None
|
|
|
|
|
|
|
|
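# Illustrative sketch (the path is hypothetical): with the default opener,
# FileHandler serves file:// URLs that refer to the local host.
#
#   f = urlopen('file:///tmp/example.txt')
#   data = f.read()
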
class FTPHandler(BaseHandler):
    def ftp_open(self, req):
        import ftplib
        import mimetypes
        host = req.host
        if not host:
            raise URLError('ftp error: no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT
        else:
            port = int(port)

        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = user or ''
        passwd = passwd or ''

        try:
            host = socket.gethostbyname(host)
        except OSError as msg:
            raise URLError(msg)
        path, attrs = splitattr(req.selector)
        dirs = path.split('/')
        dirs = list(map(unquote, dirs))
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitvalue(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()
            fp, retrlen = fw.retrfile(file, type)
            headers = ""
            mtype = mimetypes.guess_type(req.full_url)[0]
            if mtype:
                headers += "Content-type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-length: %d\n" % retrlen
            headers = email.message_from_string(headers)
            return addinfourl(fp, headers, req.full_url)
        except ftplib.all_errors as exp:
            exc = URLError('ftp error: %r' % exp)
            raise exc.with_traceback(sys.exc_info()[2])

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        return ftpwrapper(user, passwd, host, port, dirs, timeout,
                          persistent=False)

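# Illustrative sketch (host and path are hypothetical): ftp:// URLs are
# dispatched to FTPHandler; a trailing ;type=a attribute selects an ASCII
# transfer instead of the default binary/directory choice.
#
#   f = urlopen('ftp://ftp.example.org/pub/README;type=a')
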
class CacheFTPHandler(FTPHandler):
    # XXX would be nice to have pluggable cache strategies
    # XXX this stuff is definitely not thread safe
    def __init__(self):
        self.cache = {}
        self.timeout = {}
        self.soonest = 0
        self.delay = 60
        self.max_conns = 16

    def setTimeout(self, t):
        self.delay = t

    def setMaxConns(self, m):
        self.max_conns = m

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        key = user, host, port, '/'.join(dirs), timeout
        if key in self.cache:
            self.timeout[key] = time.time() + self.delay
        else:
            self.cache[key] = ftpwrapper(user, passwd, host, port,
                                         dirs, timeout)
            self.timeout[key] = time.time() + self.delay
        self.check_cache()
        return self.cache[key]

    def check_cache(self):
        # first check for old ones
        t = time.time()
        if self.soonest <= t:
            for k, v in list(self.timeout.items()):
                if v < t:
                    self.cache[k].close()
                    del self.cache[k]
                    del self.timeout[k]
        self.soonest = min(list(self.timeout.values()))

        # then check the size
        if len(self.cache) == self.max_conns:
            for k, v in list(self.timeout.items()):
                if v == self.soonest:
                    del self.cache[k]
                    del self.timeout[k]
                    break
            self.soonest = min(list(self.timeout.values()))

    def clear_cache(self):
        for conn in self.cache.values():
            conn.close()
        self.cache.clear()
        self.timeout.clear()

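# Illustrative sketch: CacheFTPHandler reuses FTP connections between
# requests; the idle delay and the connection cap can be tuned before the
# handler is installed.
#
#   handler = CacheFTPHandler()
#   handler.setTimeout(30)   # close cached connections idle for 30 seconds
#   handler.setMaxConns(4)   # keep at most 4 connections in the cache
#   opener = build_opener(handler)
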
class DataHandler(BaseHandler):
    def data_open(self, req):
        # data URLs as specified in RFC 2397.
        #
        # ignores POSTed data
        #
        # syntax:
        # dataurl   := "data:" [ mediatype ] [ ";base64" ] "," data
        # mediatype := [ type "/" subtype ] *( ";" parameter )
        # data      := *urlchar
        # parameter := attribute "=" value
        url = req.full_url

        scheme, data = url.split(":", 1)
        mediatype, data = data.split(",", 1)

        # even base64 encoded data URLs might be quoted so unquote in any case:
        data = unquote_to_bytes(data)
        if mediatype.endswith(";base64"):
            data = base64.decodebytes(data)
            mediatype = mediatype[:-7]

        if not mediatype:
            mediatype = "text/plain;charset=US-ASCII"

        headers = email.message_from_string("Content-type: %s\nContent-length: %d\n" %
            (mediatype, len(data)))

        return addinfourl(io.BytesIO(data), headers, url)

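# For example (illustrative data: URL; 'aGVsbG8=' is base64 for b'hello'):
#
#   f = urlopen('data:text/plain;base64,aGVsbG8=')
#   f.read()    # -> b'hello'
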
# Code moved from the old urllib module

MAXFTPCACHE = 10        # Trim the ftp cache beyond this size

# Helper for non-unix systems
if os.name == 'nt':
    from nturl2path import url2pathname, pathname2url
else:
    def url2pathname(pathname):
        """OS-specific conversion from a relative URL of the 'file' scheme
        to a file system path; not recommended for general use."""
        return unquote(pathname)

    def pathname2url(pathname):
        """OS-specific conversion from a file system path to a relative URL
        of the 'file' scheme; not recommended for general use."""
        return quote(pathname)


# This really consists of two pieces:
# (1) a class which handles opening of all sorts of URLs
#     (plus assorted utilities etc.)
# (2) a set of functions for parsing URLs
# XXX Should these be separated out into different modules?


ftpcache = {}

class URLopener:
    """Class to open URLs.
    This is a class rather than just a subroutine because we may need
    more than one set of global protocol-specific options.
    Note -- this is a base class for those who don't want the
    automatic handling of errors type 302 (relocated) and 401
    (authorization needed)."""

    __tempfiles = None

    version = "Python-urllib/%s" % __version__

    # Constructor
    def __init__(self, proxies=None, **x509):
        msg = "%(class)s style of invoking requests is deprecated. " \
              "Use newer urlopen functions/methods" % {'class': self.__class__.__name__}
        warnings.warn(msg, DeprecationWarning, stacklevel=3)
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'keys'), "proxies must be a mapping"
        self.proxies = proxies
        self.key_file = x509.get('key_file')
        self.cert_file = x509.get('cert_file')
        self.addheaders = [('User-Agent', self.version)]
        self.__tempfiles = []
        self.__unlink = os.unlink # See cleanup()
        self.tempcache = None
        # Undocumented feature: if you assign {} to tempcache,
        # it is used to cache files retrieved with
        # self.retrieve().  This is not enabled by default
        # since it does not work for changing documents (and I
        # haven't got the logic to check expiration headers
        # yet).
        self.ftpcache = ftpcache
        # Undocumented feature: you can use a different
        # ftp cache by assigning to the .ftpcache member;
        # in case you want logically independent URL openers
        # XXX This is not threadsafe.  Bah.

    def __del__(self):
        self.close()

    def close(self):
        self.cleanup()

    def cleanup(self):
        # This code sometimes runs when the rest of this module
        # has already been deleted, so it can't use any globals
        # or import anything.
        if self.__tempfiles:
            for file in self.__tempfiles:
                try:
                    self.__unlink(file)
                except OSError:
                    pass
            del self.__tempfiles[:]
        if self.tempcache:
            self.tempcache.clear()

    def addheader(self, *args):
        """Add a header to be used by the HTTP interface only
        e.g. u.addheader('Accept', 'sound/basic')"""
        self.addheaders.append(args)

    # External interface
    def open(self, fullurl, data=None):
        """Use URLopener().open(file) instead of open(file, 'r')."""
        fullurl = unwrap(to_bytes(fullurl))
        fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
        if self.tempcache and fullurl in self.tempcache:
            filename, headers = self.tempcache[fullurl]
            fp = open(filename, 'rb')
            return addinfourl(fp, headers, fullurl)
        urltype, url = splittype(fullurl)
        if not urltype:
            urltype = 'file'
        if urltype in self.proxies:
            proxy = self.proxies[urltype]
            urltype, proxyhost = splittype(proxy)
            host, selector = splithost(proxyhost)
            url = (host, fullurl) # Signal special case to open_*()
        else:
            proxy = None
        name = 'open_' + urltype
        self.type = urltype
        name = name.replace('-', '_')
        if not hasattr(self, name):
            if proxy:
                return self.open_unknown_proxy(proxy, fullurl, data)
            else:
                return self.open_unknown(fullurl, data)
        try:
            if data is None:
                return getattr(self, name)(url)
            else:
                return getattr(self, name)(url, data)
        except (HTTPError, URLError):
            raise
        except OSError as msg:
            raise OSError('socket error', msg).with_traceback(sys.exc_info()[2])

    def open_unknown(self, fullurl, data=None):
        """Overridable interface to open unknown URL type."""
        type, url = splittype(fullurl)
        raise OSError('url error', 'unknown url type', type)

    def open_unknown_proxy(self, proxy, fullurl, data=None):
        """Overridable interface to open unknown URL type."""
        type, url = splittype(fullurl)
        raise OSError('url error', 'invalid proxy for %s' % type, proxy)

    # External interface
    def retrieve(self, url, filename=None, reporthook=None, data=None):
        """retrieve(url) returns (filename, headers) for a local object
        or (tempfilename, headers) for a remote object."""
        url = unwrap(to_bytes(url))
        if self.tempcache and url in self.tempcache:
            return self.tempcache[url]
        type, url1 = splittype(url)
        if filename is None and (not type or type == 'file'):
            try:
                fp = self.open_local_file(url1)
                hdrs = fp.info()
                fp.close()
                return url2pathname(splithost(url1)[1]), hdrs
            except OSError as msg:
                pass
        fp = self.open(url, data)
        try:
            headers = fp.info()
            if filename:
                tfp = open(filename, 'wb')
            else:
                import tempfile
                garbage, path = splittype(url)
                garbage, path = splithost(path or "")
                path, garbage = splitquery(path or "")
                path, garbage = splitattr(path or "")
                suffix = os.path.splitext(path)[1]
                (fd, filename) = tempfile.mkstemp(suffix)
                self.__tempfiles.append(filename)
                tfp = os.fdopen(fd, 'wb')
            try:
                result = filename, headers
                if self.tempcache is not None:
                    self.tempcache[url] = result
                bs = 1024*8
                size = -1
                read = 0
                blocknum = 0
                if "content-length" in headers:
                    size = int(headers["Content-Length"])
                if reporthook:
                    reporthook(blocknum, bs, size)
                while 1:
                    block = fp.read(bs)
                    if not block:
                        break
                    read += len(block)
                    tfp.write(block)
                    blocknum += 1
                    if reporthook:
                        reporthook(blocknum, bs, size)
            finally:
                tfp.close()
        finally:
            fp.close()

        # raise exception if actual size does not match content-length header
        if size >= 0 and read < size:
            raise ContentTooShortError(
                "retrieval incomplete: got only %i out of %i bytes"
                % (read, size), result)

        return result

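    # Illustrative sketch (URL and filename are hypothetical): retrieve()
    # calls reporthook(blocknum, blocksize, totalsize) once up front and
    # again after every block, which is enough for a progress display.
    #
    #   def report(blocknum, bs, size):
    #       print("block %d read (total size %d)" % (blocknum, size))
    #
    #   opener = URLopener()
    #   opener.retrieve('http://example.org/file.bin', 'file.bin', report)
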
    # Each method named open_<type> knows how to open that type of URL

    def _open_generic_http(self, connection_factory, url, data):
        """Make an HTTP connection using connection_class.

        This is an internal method that should be called from
        open_http() or open_https().

        Arguments:
        - connection_factory should take a host name and return an
          HTTPConnection instance.
        - url is the url to retrieve or a host, relative-path pair.
        - data is payload for a POST request or None.
        """

        user_passwd = None
        proxy_passwd = None
        if isinstance(url, str):
            host, selector = splithost(url)
            if host:
                user_passwd, host = splituser(host)
                host = unquote(host)
            realhost = host
        else:
            host, selector = url
            # check whether the proxy contains authorization information
            proxy_passwd, host = splituser(host)
            # now we proceed with the url we want to obtain
            urltype, rest = splittype(selector)
            url = rest
            user_passwd = None
            if urltype.lower() != 'http':
                realhost = None
            else:
                realhost, rest = splithost(rest)
                if realhost:
                    user_passwd, realhost = splituser(realhost)
                if user_passwd:
                    selector = "%s://%s%s" % (urltype, realhost, rest)
                if proxy_bypass(realhost):
                    host = realhost

        if not host: raise OSError('http error', 'no host given')

        if proxy_passwd:
            proxy_passwd = unquote(proxy_passwd)
            proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii')
        else:
            proxy_auth = None

        if user_passwd:
            user_passwd = unquote(user_passwd)
            auth = base64.b64encode(user_passwd.encode()).decode('ascii')
        else:
            auth = None
        http_conn = connection_factory(host)
        headers = {}
        if proxy_auth:
            headers["Proxy-Authorization"] = "Basic %s" % proxy_auth
        if auth:
            headers["Authorization"] = "Basic %s" % auth
        if realhost:
            headers["Host"] = realhost

        # Add Connection:close as we don't support persistent connections yet.
        # This helps in closing the socket and avoiding ResourceWarning

        headers["Connection"] = "close"

        for header, value in self.addheaders:
            headers[header] = value

        if data is not None:
            headers["Content-Type"] = "application/x-www-form-urlencoded"
            http_conn.request("POST", selector, data, headers)
        else:
            http_conn.request("GET", selector, headers=headers)

        try:
            response = http_conn.getresponse()
        except http.client.BadStatusLine:
            # something went wrong with the HTTP status line
            raise URLError("http protocol error: bad status line")

        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        if 200 <= response.status < 300:
            return addinfourl(response, response.msg, "http:" + url,
                              response.status)
        else:
            return self.http_error(
                url, response.fp,
                response.status, response.reason, response.msg, data)

    def open_http(self, url, data=None):
        """Use HTTP protocol."""
        return self._open_generic_http(http.client.HTTPConnection, url, data)

    def http_error(self, url, fp, errcode, errmsg, headers, data=None):
        """Handle http errors.

        Derived class can override this, or provide specific handlers
        named http_error_DDD where DDD is the 3-digit error code."""
        # First check if there's a specific handler for this error
        name = 'http_error_%d' % errcode
        if hasattr(self, name):
            method = getattr(self, name)
            if data is None:
                result = method(url, fp, errcode, errmsg, headers)
            else:
                result = method(url, fp, errcode, errmsg, headers, data)
            if result: return result
        return self.http_error_default(url, fp, errcode, errmsg, headers)

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        """Default error handler: close the connection and raise OSError."""
        fp.close()
        raise HTTPError(url, errcode, errmsg, headers, None)

    if _have_ssl:
        def _https_connection(self, host):
            return http.client.HTTPSConnection(host,
                                               key_file=self.key_file,
                                               cert_file=self.cert_file)

        def open_https(self, url, data=None):
            """Use HTTPS protocol."""
            return self._open_generic_http(self._https_connection, url, data)

    def open_file(self, url):
        """Use local file or FTP depending on form of URL."""
        if not isinstance(url, str):
            raise URLError('file error: proxy support for file protocol currently not implemented')
        if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/':
            raise ValueError("file:// scheme is supported only on localhost")
        else:
            return self.open_local_file(url)

    def open_local_file(self, url):
        """Use local file."""
        import email.utils
        import mimetypes
        host, file = splithost(url)
        localname = url2pathname(file)
        try:
            stats = os.stat(localname)
        except OSError as e:
            raise URLError(e.strerror, e.filename)
        size = stats.st_size
        modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
        mtype = mimetypes.guess_type(url)[0]
        headers = email.message_from_string(
            'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
            (mtype or 'text/plain', size, modified))
        if not host:
            urlfile = file
            if file[:1] == '/':
                urlfile = 'file://' + file
            return addinfourl(open(localname, 'rb'), headers, urlfile)
        host, port = splitport(host)
        if (not port
                and socket.gethostbyname(host) in ((localhost(),) + thishost())):
            urlfile = file
            if file[:1] == '/':
                urlfile = 'file://' + file
            elif file[:2] == './':
                raise ValueError("local file url may start with / or file:. Unknown url of type: %s" % url)
            return addinfourl(open(localname, 'rb'), headers, urlfile)
        raise URLError('local file error: not on local host')

    def open_ftp(self, url):
        """Use FTP protocol."""
        if not isinstance(url, str):
            raise URLError('ftp error: proxy support for ftp protocol currently not implemented')
        import mimetypes
        host, path = splithost(url)
        if not host: raise URLError('ftp error: no host given')
        host, port = splitport(host)
        user, host = splituser(host)
        if user: user, passwd = splitpasswd(user)
        else: passwd = None
        host = unquote(host)
        user = unquote(user or '')
        passwd = unquote(passwd or '')
        host = socket.gethostbyname(host)
        if not port:
            import ftplib
            port = ftplib.FTP_PORT
        else:
            port = int(port)
        path, attrs = splitattr(path)
        path = unquote(path)
        dirs = path.split('/')
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]: dirs = dirs[1:]
        if dirs and not dirs[0]: dirs[0] = '/'
        key = user, host, port, '/'.join(dirs)
        # XXX thread unsafe!
        if len(self.ftpcache) > MAXFTPCACHE:
            # Prune the cache, rather arbitrarily
            for k in list(self.ftpcache.keys()):
                if k != key:
                    v = self.ftpcache[k]
                    del self.ftpcache[k]
                    v.close()
        try:
            if key not in self.ftpcache:
                self.ftpcache[key] = \
                    ftpwrapper(user, passwd, host, port, dirs)
            if not file: type = 'D'
            else: type = 'I'
            for attr in attrs:
                attr, value = splitvalue(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()
            (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
            mtype = mimetypes.guess_type("ftp:" + url)[0]
            headers = ""
            if mtype:
                headers += "Content-Type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-Length: %d\n" % retrlen
            headers = email.message_from_string(headers)
            return addinfourl(fp, headers, "ftp:" + url)
        except ftperrors() as exp:
            raise URLError('ftp error %r' % exp).with_traceback(sys.exc_info()[2])

    def open_data(self, url, data=None):
        """Use "data" URL."""
        if not isinstance(url, str):
            raise URLError('data error: proxy support for data protocol currently not implemented')
        # ignore POSTed data
        #
        # syntax of data URLs:
        # dataurl   := "data:" [ mediatype ] [ ";base64" ] "," data
        # mediatype := [ type "/" subtype ] *( ";" parameter )
        # data      := *urlchar
        # parameter := attribute "=" value
        try:
            [type, data] = url.split(',', 1)
        except ValueError:
            raise OSError('data error', 'bad data URL')
        if not type:
            type = 'text/plain;charset=US-ASCII'
        semi = type.rfind(';')
        if semi >= 0 and '=' not in type[semi:]:
            encoding = type[semi+1:]
            type = type[:semi]
        else:
            encoding = ''
        msg = []
        msg.append('Date: %s'%time.strftime('%a, %d %b %Y %H:%M:%S GMT',
                                            time.gmtime(time.time())))
        msg.append('Content-type: %s' % type)
        if encoding == 'base64':
            # XXX is this encoding/decoding ok?
            data = base64.decodebytes(data.encode('ascii')).decode('latin-1')
        else:
            data = unquote(data)
        msg.append('Content-Length: %d' % len(data))
        msg.append('')
        msg.append(data)
        msg = '\n'.join(msg)
        headers = email.message_from_string(msg)
        f = io.StringIO(msg)
        #f.fileno = None     # needed for addinfourl
        return addinfourl(f, headers, url)


class FancyURLopener(URLopener):
    """Derived class with handlers for errors we can handle (perhaps)."""

    def __init__(self, *args, **kwargs):
        URLopener.__init__(self, *args, **kwargs)
        self.auth_cache = {}
        self.tries = 0
        self.maxtries = 10

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        """Default error handling -- don't raise an exception."""
        return addinfourl(fp, headers, "http:" + url, errcode)

    def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 302 -- relocated (temporarily)."""
        self.tries += 1
        if self.maxtries and self.tries >= self.maxtries:
            if hasattr(self, "http_error_500"):
                meth = self.http_error_500
            else:
                meth = self.http_error_default
            self.tries = 0
            return meth(url, fp, 500,
                        "Internal Server Error: Redirect Recursion", headers)
        result = self.redirect_internal(url, fp, errcode, errmsg, headers,
                                        data)
        self.tries = 0
        return result

    def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
        if 'location' in headers:
            newurl = headers['location']
        elif 'uri' in headers:
            newurl = headers['uri']
        else:
            return
        fp.close()

        # In case the server sent a relative URL, join with original:
        newurl = urljoin(self.type + ":" + url, newurl)

        urlparts = urlparse(newurl)

        # For security reasons, we don't allow redirection to anything other
        # than http, https and ftp.

        # We are using newer HTTPError with older redirect_internal method
        # This older method will get deprecated in 3.3

        if urlparts.scheme not in ('http', 'https', 'ftp', ''):
            raise HTTPError(newurl, errcode,
                            errmsg +
                            " Redirection to url '%s' is not allowed." % newurl,
                            headers, fp)

        return self.open(newurl)

    def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 301 -- also relocated (permanently)."""
        return self.http_error_302(url, fp, errcode, errmsg, headers, data)

    def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 303 -- also relocated (essentially identical to 302)."""
        return self.http_error_302(url, fp, errcode, errmsg, headers, data)

    def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 307 -- relocated, but turn POST into error."""
        if data is None:
            return self.http_error_302(url, fp, errcode, errmsg, headers, data)
        else:
            return self.http_error_default(url, fp, errcode, errmsg, headers)

    def http_error_401(self, url, fp, errcode, errmsg, headers, data=None,
            retry=False):
        """Error 401 -- authentication required.
        This function supports Basic authentication only."""
        if 'www-authenticate' not in headers:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        stuff = headers['www-authenticate']
        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
        if not match:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        scheme, realm = match.groups()
        if scheme.lower() != 'basic':
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        if not retry:
            URLopener.http_error_default(self, url, fp, errcode, errmsg,
                    headers)
        name = 'retry_' + self.type + '_basic_auth'
        if data is None:
            return getattr(self,name)(url, realm)
        else:
            return getattr(self,name)(url, realm, data)

    def http_error_407(self, url, fp, errcode, errmsg, headers, data=None,
            retry=False):
        """Error 407 -- proxy authentication required.
        This function supports Basic authentication only."""
        if 'proxy-authenticate' not in headers:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        stuff = headers['proxy-authenticate']
        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
        if not match:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        scheme, realm = match.groups()
        if scheme.lower() != 'basic':
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        if not retry:
            URLopener.http_error_default(self, url, fp, errcode, errmsg,
                    headers)
        name = 'retry_proxy_' + self.type + '_basic_auth'
        if data is None:
            return getattr(self,name)(url, realm)
        else:
            return getattr(self,name)(url, realm, data)

    def retry_proxy_http_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        newurl = 'http://' + host + selector
        proxy = self.proxies['http']
        urltype, proxyhost = splittype(proxy)
        proxyhost, proxyselector = splithost(proxyhost)
        i = proxyhost.find('@') + 1
        proxyhost = proxyhost[i:]
        user, passwd = self.get_user_passwd(proxyhost, realm, i)
        if not (user or passwd): return None
        proxyhost = "%s:%s@%s" % (quote(user, safe=''),
                                  quote(passwd, safe=''), proxyhost)
        self.proxies['http'] = 'http://' + proxyhost + proxyselector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_proxy_https_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        newurl = 'https://' + host + selector
        proxy = self.proxies['https']
        urltype, proxyhost = splittype(proxy)
        proxyhost, proxyselector = splithost(proxyhost)
        i = proxyhost.find('@') + 1
        proxyhost = proxyhost[i:]
        user, passwd = self.get_user_passwd(proxyhost, realm, i)
        if not (user or passwd): return None
        proxyhost = "%s:%s@%s" % (quote(user, safe=''),
                                  quote(passwd, safe=''), proxyhost)
        self.proxies['https'] = 'https://' + proxyhost + proxyselector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_http_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        i = host.find('@') + 1
        host = host[i:]
        user, passwd = self.get_user_passwd(host, realm, i)
        if not (user or passwd): return None
        host = "%s:%s@%s" % (quote(user, safe=''),
                             quote(passwd, safe=''), host)
        newurl = 'http://' + host + selector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_https_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        i = host.find('@') + 1
        host = host[i:]
        user, passwd = self.get_user_passwd(host, realm, i)
        if not (user or passwd): return None
        host = "%s:%s@%s" % (quote(user, safe=''),
                             quote(passwd, safe=''), host)
        newurl = 'https://' + host + selector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def get_user_passwd(self, host, realm, clear_cache=0):
        key = realm + '@' + host.lower()
        if key in self.auth_cache:
            if clear_cache:
                del self.auth_cache[key]
            else:
                return self.auth_cache[key]
        user, passwd = self.prompt_user_passwd(host, realm)
        if user or passwd: self.auth_cache[key] = (user, passwd)
        return user, passwd

    def prompt_user_passwd(self, host, realm):
        """Override this in a GUI environment!"""
        import getpass
        try:
            user = input("Enter username for %s at %s: " % (realm, host))
            passwd = getpass.getpass("Enter password for %s in %s at %s: " %
                (user, realm, host))
            return user, passwd
        except KeyboardInterrupt:
            print()
            return None, None


# Utility functions

_localhost = None
def localhost():
    """Return the IP address of the magic hostname 'localhost'."""
    global _localhost
    if _localhost is None:
        _localhost = socket.gethostbyname('localhost')
    return _localhost

_thishost = None
def thishost():
    """Return the IP addresses of the current host."""
    global _thishost
    if _thishost is None:
        try:
            _thishost = tuple(socket.gethostbyname_ex(socket.gethostname())[2])
        except socket.gaierror:
            _thishost = tuple(socket.gethostbyname_ex('localhost')[2])
    return _thishost

_ftperrors = None
def ftperrors():
    """Return the set of errors raised by the FTP class."""
    global _ftperrors
    if _ftperrors is None:
        import ftplib
        _ftperrors = ftplib.all_errors
    return _ftperrors

_noheaders = None
def noheaders():
    """Return an empty email Message object."""
    global _noheaders
    if _noheaders is None:
        _noheaders = email.message_from_string("")
    return _noheaders

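# Illustrative sketch (addresses vary by machine; the values shown are
# placeholders): both helpers memoize their lookups, so repeated calls do
# not hit the resolver again.
#
#   >>> localhost()
#   '127.0.0.1'
#   >>> thishost()
#   ('192.0.2.10',)
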

# Utility classes

class ftpwrapper:
    """Class used by open_ftp() for cache of open FTP connections."""

    def __init__(self, user, passwd, host, port, dirs, timeout=None,
                 persistent=True):
        self.user = user
        self.passwd = passwd
        self.host = host
        self.port = port
        self.dirs = dirs
        self.timeout = timeout
        self.refcount = 0
        self.keepalive = persistent
        self.init()

    def init(self):
        import ftplib
        self.busy = 0
        self.ftp = ftplib.FTP()
        self.ftp.connect(self.host, self.port, self.timeout)
        self.ftp.login(self.user, self.passwd)
        _target = '/'.join(self.dirs)
        self.ftp.cwd(_target)

    def retrfile(self, file, type):
        import ftplib
        self.endtransfer()
        if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
        else: cmd = 'TYPE ' + type; isdir = 0
        try:
            self.ftp.voidcmd(cmd)
        except ftplib.all_errors:
            self.init()
            self.ftp.voidcmd(cmd)
        conn = None
        if file and not isdir:
            # Try to retrieve as a file
            try:
                cmd = 'RETR ' + file
                conn, retrlen = self.ftp.ntransfercmd(cmd)
            except ftplib.error_perm as reason:
                if str(reason)[:3] != '550':
                    raise URLError('ftp error: %r' % reason).with_traceback(
                        sys.exc_info()[2])
        if not conn:
            # Set transfer mode to ASCII!
            self.ftp.voidcmd('TYPE A')
            # Try a directory listing. Verify that directory exists.
            if file:
                pwd = self.ftp.pwd()
                try:
                    try:
                        self.ftp.cwd(file)
                    except ftplib.error_perm as reason:
                        raise URLError('ftp error: %r' % reason) from reason
                finally:
                    self.ftp.cwd(pwd)
                cmd = 'LIST ' + file
            else:
                cmd = 'LIST'
            conn, retrlen = self.ftp.ntransfercmd(cmd)
        self.busy = 1

        ftpobj = addclosehook(conn.makefile('rb'), self.file_close)
        self.refcount += 1
        conn.close()
        # Pass back both a suitably decorated object and a retrieval length
        return (ftpobj, retrlen)

    def endtransfer(self):
        self.busy = 0

    def close(self):
        self.keepalive = False
        if self.refcount <= 0:
            self.real_close()

    def file_close(self):
        self.endtransfer()
        self.refcount -= 1
        if self.refcount <= 0 and not self.keepalive:
            self.real_close()

    def real_close(self):
        self.endtransfer()
        try:
            self.ftp.close()
        except ftperrors():
            pass

# Proxy handling
def getproxies_environment():
    """Return a dictionary of scheme -> proxy server URL mappings.

    Scan the environment for variables named <scheme>_proxy;
    this seems to be the standard convention.  If you need a
    different way, you can pass a proxies dictionary to the
    [Fancy]URLopener constructor.

    """
    proxies = {}
    for name, value in os.environ.items():
        name = name.lower()
        if value and name[-6:] == '_proxy':
            proxies[name[:-6]] = value
    return proxies

def proxy_bypass_environment(host):
    """Test if proxies should not be used for a particular host.

    Checks the environment for a variable named no_proxy, which should
    be a list of DNS suffixes separated by commas, or '*' for all hosts.
    """
    no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '')
    # '*' is special case for always bypass
    if no_proxy == '*':
        return 1
    # strip port off host
    hostonly, port = splitport(host)
    # check if the host ends with any of the DNS suffixes
    no_proxy_list = [proxy.strip() for proxy in no_proxy.split(',')]
    for name in no_proxy_list:
        if name and (hostonly.endswith(name) or host.endswith(name)):
            return 1
    # otherwise, don't bypass
    return 0

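# Illustrative sketch: with no_proxy set in the environment, hosts matching
# one of the listed suffixes bypass the proxy.
#
#   os.environ['no_proxy'] = 'localhost,.example.org'
#   proxy_bypass_environment('www.example.org')   # -> 1
#   proxy_bypass_environment('www.python.org')    # -> 0
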
# This code tests an OSX specific data structure but is testable on all
# platforms
def _proxy_bypass_macosx_sysconf(host, proxy_settings):
    """
    Return True iff this host shouldn't be accessed using a proxy

    This function uses the MacOSX framework SystemConfiguration
    to fetch the proxy information.

    proxy_settings come from _scproxy._get_proxy_settings or get mocked, e.g.:
    { 'exclude_simple': bool,
      'exceptions': ['foo.bar', '*.bar.com', '127.0.0.1', '10.1', '10.0/16']
    }
    """
    from fnmatch import fnmatch

    hostonly, port = splitport(host)

    def ip2num(ipAddr):
        parts = ipAddr.split('.')
        parts = list(map(int, parts))
        if len(parts) != 4:
            parts = (parts + [0, 0, 0, 0])[:4]
        return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]

    # Check for simple host names:
    if '.' not in host:
        if proxy_settings['exclude_simple']:
            return True

    hostIP = None

    for value in proxy_settings.get('exceptions', ()):
        # Items in the list are strings like these: *.local, 169.254/16
        if not value: continue

        m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
        if m is not None:
            if hostIP is None:
                try:
                    hostIP = socket.gethostbyname(hostonly)
                    hostIP = ip2num(hostIP)
                except OSError:
                    continue

            base = ip2num(m.group(1))
            mask = m.group(2)
            if mask is None:
                mask = 8 * (m.group(1).count('.') + 1)
            else:
                mask = int(mask[1:])
            mask = 32 - mask

            if (hostIP >> mask) == (base >> mask):
                return True

        elif fnmatch(host, value):
            return True

    return False

if sys.platform == 'darwin':
    from _scproxy import _get_proxy_settings, _get_proxies

    def proxy_bypass_macosx_sysconf(host):
        proxy_settings = _get_proxy_settings()
        return _proxy_bypass_macosx_sysconf(host, proxy_settings)

    def getproxies_macosx_sysconf():
        """Return a dictionary of scheme -> proxy server URL mappings.

        This function uses the MacOSX framework SystemConfiguration
        to fetch the proxy information.
        """
        return _get_proxies()

    def proxy_bypass(host):
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_macosx_sysconf(host)

    def getproxies():
        return getproxies_environment() or getproxies_macosx_sysconf()

elif os.name == 'nt':
    def getproxies_registry():
        """Return a dictionary of scheme -> proxy server URL mappings.

        Win32 uses the registry to store proxies.

        """
        proxies = {}
        try:
            import winreg
        except ImportError:
            # Std module, so should be around - but you never know!
            return proxies
        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = winreg.QueryValueEx(internetSettings,
                                              'ProxyEnable')[0]
            if proxyEnable:
                # Returned as Unicode but problems if not converted to ASCII
                proxyServer = str(winreg.QueryValueEx(internetSettings,
                                                      'ProxyServer')[0])
                if '=' in proxyServer:
                    # Per-protocol settings
                    for p in proxyServer.split(';'):
                        protocol, address = p.split('=', 1)
                        # See if address has a type:// prefix
                        if not re.match('^([^/:]+)://', address):
                            address = '%s://%s' % (protocol, address)
                        proxies[protocol] = address
                else:
                    # Use one setting for all protocols
                    if proxyServer[:5] == 'http:':
                        proxies['http'] = proxyServer
                    else:
                        proxies['http'] = 'http://%s' % proxyServer
                        proxies['https'] = 'https://%s' % proxyServer
                        proxies['ftp'] = 'ftp://%s' % proxyServer
            internetSettings.Close()
        except (OSError, ValueError, TypeError):
            # Either registry key not found etc, or the value in an
            # unexpected format.
            # proxies already set up to be empty so nothing to do
            pass
        return proxies

    def getproxies():
        """Return a dictionary of scheme -> proxy server URL mappings.

        Returns settings gathered from the environment, if specified,
        or the registry.

        """
        return getproxies_environment() or getproxies_registry()

    def proxy_bypass_registry(host):
        try:
            import winreg
        except ImportError:
            # Std modules, so should be around - but you never know!
            return 0
        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = winreg.QueryValueEx(internetSettings,
                                              'ProxyEnable')[0]
            proxyOverride = str(winreg.QueryValueEx(internetSettings,
                                                    'ProxyOverride')[0])
            # ^^^^ Returned as Unicode but problems if not converted to ASCII
        except OSError:
            return 0
        if not proxyEnable or not proxyOverride:
            return 0
        # try to make a host list from name and IP address.
        rawHost, port = splitport(host)
        host = [rawHost]
        try:
            addr = socket.gethostbyname(rawHost)
            if addr != rawHost:
                host.append(addr)
        except OSError:
            pass
        try:
            fqdn = socket.getfqdn(rawHost)
            if fqdn != rawHost:
                host.append(fqdn)
        except OSError:
            pass
        # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        proxyOverride = proxyOverride.split(';')
        # now check if we match one of the registry values.
        for test in proxyOverride:
            if test == '<local>':
                if '.' not in rawHost:
                    return 1
            test = test.replace(".", r"\.")     # mask dots
            test = test.replace("*", r".*")     # change glob sequence
            test = test.replace("?", r".")      # change glob char
            for val in host:
                if re.match(test, val, re.I):
                    return 1
        return 0

    def proxy_bypass(host):
        """Return 1 if the host should be accessed without a proxy.

        Checks the no_proxy environment variable if any proxy settings
        come from the environment; otherwise consults the registry.

        """
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_registry(host)

else:
    # By default use environment variables
    getproxies = getproxies_environment
    proxy_bypass = proxy_bypass_environment