2000-02-04 11:28:42 -04:00
|
|
|
"""Open an arbitrary URL.
|
|
|
|
|
|
|
|
See the following document for more info on URLs:
|
|
|
|
"Names and Addresses, URIs, URLs, URNs, URCs", at
|
|
|
|
http://www.w3.org/pub/WWW/Addressing/Overview.html
|
|
|
|
|
|
|
|
See also the HTTP spec (from which the error codes are derived):
|
|
|
|
"HTTP - Hypertext Transfer Protocol", at
|
|
|
|
http://www.w3.org/pub/WWW/Protocols/
|
|
|
|
|
|
|
|
Related standards and specs:
|
|
|
|
- RFC1808: the "relative URL" spec. (authoritative status)
|
|
|
|
- RFC1738 - the "URL standard". (authoritative status)
|
|
|
|
- RFC1630 - the "URI spec". (informational status)
|
|
|
|
|
|
|
|
The object returned by URLopener().open(file) will differ per
|
|
|
|
protocol. All you know is that it has methods read(), readline(),
|
|
|
|
readlines(), fileno(), close() and info(). The read*(), fileno()
|
2000-09-24 15:51:25 -03:00
|
|
|
and close() methods work like those of open files.
|
2000-02-04 11:28:42 -04:00
|
|
|
The info() method returns a mimetools.Message object which can be
|
|
|
|
used to query various info about the object, if available.
|
|
|
|
(mimetools.Message objects are queried with the getheader() method.)
|
|
|
|
"""
|
1994-03-22 08:05:32 -04:00
|
|
|
|
1994-07-04 19:14:49 -03:00
|
|
|
import string
|
1994-03-22 08:05:32 -04:00
|
|
|
import socket
|
1995-12-15 09:22:13 -04:00
|
|
|
import os
|
2001-08-09 14:43:35 -03:00
|
|
|
import time
|
1996-11-20 18:02:24 -04:00
|
|
|
import sys
|
2012-01-10 12:09:24 -04:00
|
|
|
import base64
|
2013-03-14 16:31:09 -03:00
|
|
|
import re
|
2012-01-10 12:09:24 -04:00
|
|
|
|
2004-03-23 17:26:39 -04:00
|
|
|
from urlparse import urljoin as basejoin
|
1994-03-22 08:05:32 -04:00
|
|
|
|
2001-03-01 00:27:19 -04:00
|
|
|
__all__ = ["urlopen", "URLopener", "FancyURLopener", "urlretrieve",
|
|
|
|
"urlcleanup", "quote", "quote_plus", "unquote", "unquote_plus",
|
2001-03-13 15:47:16 -04:00
|
|
|
"urlencode", "url2pathname", "pathname2url", "splittag",
|
|
|
|
"localhost", "thishost", "ftperrors", "basejoin", "unwrap",
|
|
|
|
"splittype", "splithost", "splituser", "splitpasswd", "splitport",
|
|
|
|
"splitnport", "splitquery", "splitattr", "splitvalue",
|
2007-05-16 19:42:29 -03:00
|
|
|
"getproxies"]
|
2001-03-01 00:27:19 -04:00
|
|
|
|
2006-01-24 11:51:21 -04:00
|
|
|
__version__ = '1.17' # XXX This version is not always updated :-(
|
1997-06-06 18:11:11 -03:00
|
|
|
|
1999-02-25 12:12:12 -04:00
|
|
|
MAXFTPCACHE = 10 # Trim the ftp cache beyond this size
|
1995-06-22 16:00:13 -03:00
|
|
|
|
1995-12-15 09:22:13 -04:00
|
|
|
# Helper for non-unix systems
if os.name == 'nt':
    # Windows: drive letters and backslash separators need real translation.
    from nturl2path import url2pathname, pathname2url
elif os.name == 'riscos':
    # RISC OS uses its own path syntax as well.
    from rourl2path import url2pathname, pathname2url
else:
    def url2pathname(pathname):
        """OS-specific conversion from a relative URL of the 'file' scheme
        to a file system path; not recommended for general use."""
        # On POSIX the URL path is already a valid file path once the
        # percent-escapes are decoded.
        return unquote(pathname)

    def pathname2url(pathname):
        """OS-specific conversion from a file system path to a relative URL
        of the 'file' scheme; not recommended for general use."""
        # The inverse of url2pathname(): percent-escape unsafe characters.
        return quote(pathname)
|
1998-12-18 11:25:22 -04:00
|
|
|
|
1994-03-22 08:05:32 -04:00
|
|
|
# This really consists of two pieces:
|
|
|
|
# (1) a class which handles opening of all sorts of URLs
|
|
|
|
# (plus assorted utilities etc.)
|
|
|
|
# (2) a set of functions for parsing URLs
|
|
|
|
# XXX Should these be separated out into different modules?
|
|
|
|
|
|
|
|
|
|
|
|
# Shortcut for basic usage
_urlopener = None
def urlopen(url, data=None, proxies=None, context=None):
    """Create a file-like object for the specified URL to read from.

    data, if given, is POSTed to the URL.  proxies maps a URL scheme to
    a proxy URL.  context, if given, is an ssl.SSLContext used for HTTPS
    requests.  A default FancyURLopener is cached in _urlopener and
    reused across calls that pass no custom settings.
    """
    from warnings import warnpy3k
    warnpy3k("urllib.urlopen() has been removed in Python 3.0 in "
             "favor of urllib2.urlopen()", stacklevel=2)

    global _urlopener
    if proxies is not None or context is not None:
        # Custom per-call settings: use a throwaway opener so the cached
        # one is not polluted.
        opener = FancyURLopener(proxies=proxies, context=context)
    elif not _urlopener:
        # First default-configured call: create and cache the opener.
        opener = FancyURLopener()
        _urlopener = opener
    else:
        opener = _urlopener
    if data is None:
        return opener.open(url)
    else:
        return opener.open(url, data)
|
2014-11-23 22:55:24 -04:00
|
|
|
def urlretrieve(url, filename=None, reporthook=None, data=None, context=None):
    """Retrieve url into filename (or a temporary file) and return a
    (filename, headers) tuple; see URLopener.retrieve() for details."""
    global _urlopener
    if context is not None:
        # A custom SSL context must not leak into the shared cached opener.
        opener = FancyURLopener(context=context)
    elif not _urlopener:
        _urlopener = opener = FancyURLopener()
    else:
        opener = _urlopener
    return opener.retrieve(url, filename, reporthook, data)
|
1994-03-22 08:05:32 -04:00
|
|
|
def urlcleanup():
    """Delete temporary files created by urlretrieve() and reset the
    module-level caches."""
    if _urlopener:
        _urlopener.cleanup()
    # Also drop caches kept outside the opener instance.
    _safe_quoters.clear()
    ftpcache.clear()
|
1994-03-22 08:05:32 -04:00
|
|
|
|
2007-08-29 19:35:05 -03:00
|
|
|
# check for SSL support; open_https is only defined when the ssl module
# is importable, and _have_ssl records the outcome of this probe.
try:
    import ssl
except ImportError:
    # Only a missing ssl module should disable HTTPS support.  A bare
    # "except:" here would also swallow unrelated errors raised during
    # import (including KeyboardInterrupt/SystemExit).
    _have_ssl = False
else:
    _have_ssl = True
|
|
|
|
|
2005-08-24 15:46:39 -03:00
|
|
|
# exception raised when downloaded size does not match content-length
class ContentTooShortError(IOError):
    """Raised by retrieve() when fewer bytes arrive than the server's
    Content-Length header promised.

    The partial payload is kept on the ``content`` attribute so callers
    can inspect (or salvage) whatever was actually received.
    """

    def __init__(self, message, content):
        # Preserve normal IOError message semantics...
        IOError.__init__(self, message)
        # ...and stash the truncated data alongside it.
        self.content = content
|
1994-03-22 08:05:32 -04:00
|
|
|
|
|
|
|
ftpcache = {}  # module-wide FTP connection cache, shared by URLopener instances
|
|
|
|
class URLopener:
    """Class to open URLs.
    This is a class rather than just a subroutine because we may need
    more than one set of global protocol-specific options.
    Note -- this is a base class for those who don't want the
    automatic handling of errors type 302 (relocated) and 401
    (authorization needed)."""

    # Class-level default so cleanup() is safe even when __init__ never
    # ran (e.g. construction failed part-way).
    __tempfiles = None

    # User-Agent string sent with every request.
    version = "Python-urllib/%s" % __version__
|
|
|
|
|
1999-02-25 12:12:12 -04:00
|
|
|
    # Constructor
    def __init__(self, proxies=None, context=None, **x509):
        """Initialize the opener.

        proxies maps a URL scheme to a proxy URL; by default it is taken
        from the environment via getproxies().  context is an
        ssl.SSLContext used by open_https().  x509 may supply 'key_file'
        and 'cert_file' for client-certificate authentication.
        """
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
        self.proxies = proxies
        self.key_file = x509.get('key_file')
        self.cert_file = x509.get('cert_file')
        self.context = context
        self.addheaders = [('User-Agent', self.version), ('Accept', '*/*')]
        # Temp files created by retrieve(); removed by cleanup().
        self.__tempfiles = []
        self.__unlink = os.unlink # See cleanup()
        self.tempcache = None
        # Undocumented feature: if you assign {} to tempcache,
        # it is used to cache files retrieved with
        # self.retrieve(). This is not enabled by default
        # since it does not work for changing documents (and I
        # haven't got the logic to check expiration headers
        # yet).
        self.ftpcache = ftpcache
        # Undocumented feature: you can use a different
        # ftp cache by assigning to the .ftpcache member;
        # in case you want logically independent URL openers
        # XXX This is not threadsafe. Bah.
|
|
|
|
|
|
|
|
    def __del__(self):
        # Best-effort: make sure temp files are removed even if the
        # caller never invoked close() explicitly.
        self.close()
|
|
|
|
|
|
|
|
    def close(self):
        """Release resources held by this opener (temp files, caches)."""
        self.cleanup()
|
|
|
|
|
|
|
|
    def cleanup(self):
        """Delete temporary files created by retrieve() and clear the
        tempcache."""
        # This code sometimes runs when the rest of this module
        # has already been deleted, so it can't use any globals
        # or import anything.
        if self.__tempfiles:
            for file in self.__tempfiles:
                try:
                    # __unlink was bound in __init__ precisely because
                    # the os module may be gone at interpreter shutdown.
                    self.__unlink(file)
                except OSError:
                    pass
            del self.__tempfiles[:]
        if self.tempcache:
            self.tempcache.clear()
|
|
|
|
|
|
|
|
def addheader(self, *args):
|
2000-02-04 11:28:42 -04:00
|
|
|
"""Add a header to be used by the HTTP interface only
|
|
|
|
e.g. u.addheader('Accept', 'sound/basic')"""
|
1999-02-25 12:12:12 -04:00
|
|
|
self.addheaders.append(args)
|
|
|
|
|
|
|
|
    # External interface
    def open(self, fullurl, data=None):
        """Use URLopener().open(file) instead of open(file, 'r').

        Dispatches to an open_<scheme> method based on the URL scheme,
        routing through a configured proxy when one matches.  data, if
        given, is passed through to the scheme handler (POST for HTTP).
        """
        fullurl = unwrap(toBytes(fullurl))
        # percent encode url, fixing lame server errors for e.g, like space
        # within url paths.
        fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
        if self.tempcache and fullurl in self.tempcache:
            # Serve a previously-retrieved copy from the (opt-in) cache.
            filename, headers = self.tempcache[fullurl]
            fp = open(filename, 'rb')
            return addinfourl(fp, headers, fullurl)
        urltype, url = splittype(fullurl)
        if not urltype:
            # No scheme at all: treat as a local file path.
            urltype = 'file'
        if urltype in self.proxies:
            proxy = self.proxies[urltype]
            urltype, proxyhost = splittype(proxy)
            host, selector = splithost(proxyhost)
            url = (host, fullurl) # Signal special case to open_*()
        else:
            proxy = None
        name = 'open_' + urltype
        self.type = urltype
        # Scheme names may contain '-' which is illegal in identifiers.
        name = name.replace('-', '_')
        if not hasattr(self, name):
            if proxy:
                return self.open_unknown_proxy(proxy, fullurl, data)
            else:
                return self.open_unknown(fullurl, data)
        try:
            if data is None:
                return getattr(self, name)(url)
            else:
                return getattr(self, name)(url, data)
        except socket.error, msg:
            # Re-raise as IOError but keep the original traceback.
            raise IOError, ('socket error', msg), sys.exc_info()[2]
|
|
|
|
|
|
|
|
def open_unknown(self, fullurl, data=None):
|
2000-02-04 11:28:42 -04:00
|
|
|
"""Overridable interface to open unknown URL type."""
|
1999-02-25 12:12:12 -04:00
|
|
|
type, url = splittype(fullurl)
|
|
|
|
raise IOError, ('url error', 'unknown url type', type)
|
|
|
|
|
2000-10-02 20:04:02 -03:00
|
|
|
def open_unknown_proxy(self, proxy, fullurl, data=None):
|
|
|
|
"""Overridable interface to open unknown URL type."""
|
|
|
|
type, url = splittype(fullurl)
|
|
|
|
raise IOError, ('url error', 'invalid proxy for %s' % type, proxy)
|
|
|
|
|
1999-02-25 12:12:12 -04:00
|
|
|
    # External interface
    def retrieve(self, url, filename=None, reporthook=None, data=None):
        """retrieve(url) returns (filename, headers) for a local object
        or (tempfilename, headers) for a remote object.

        reporthook, if given, is called as reporthook(blocknum,
        blocksize, totalsize) once before the transfer and after every
        block.  Raises ContentTooShortError when fewer bytes arrive
        than the Content-Length header announced.
        """
        url = unwrap(toBytes(url))
        if self.tempcache and url in self.tempcache:
            return self.tempcache[url]
        type, url1 = splittype(url)
        if filename is None and (not type or type == 'file'):
            # Local file with no explicit destination: no copy needed,
            # just return the path of the existing file.
            try:
                fp = self.open_local_file(url1)
                hdrs = fp.info()
                fp.close()
                return url2pathname(splithost(url1)[1]), hdrs
            except IOError:
                pass
        fp = self.open(url, data)
        try:
            headers = fp.info()
            if filename:
                tfp = open(filename, 'wb')
            else:
                # No destination given: download into a temp file whose
                # suffix mirrors the URL path's extension.
                import tempfile
                garbage, path = splittype(url)
                garbage, path = splithost(path or "")
                path, garbage = splitquery(path or "")
                path, garbage = splitattr(path or "")
                suffix = os.path.splitext(path)[1]
                (fd, filename) = tempfile.mkstemp(suffix)
                self.__tempfiles.append(filename)
                tfp = os.fdopen(fd, 'wb')
            try:
                result = filename, headers
                if self.tempcache is not None:
                    self.tempcache[url] = result
                bs = 1024*8
                size = -1
                read = 0
                blocknum = 0
                # mimetools.Message lookup is case-insensitive.
                if "content-length" in headers:
                    size = int(headers["Content-Length"])
                if reporthook:
                    reporthook(blocknum, bs, size)
                while 1:
                    block = fp.read(bs)
                    if block == "":
                        break
                    read += len(block)
                    tfp.write(block)
                    blocknum += 1
                    if reporthook:
                        reporthook(blocknum, bs, size)
            finally:
                tfp.close()
        finally:
            fp.close()

        # raise exception if actual size does not match content-length header
        if size >= 0 and read < size:
            raise ContentTooShortError("retrieval incomplete: got only %i out "
                                       "of %i bytes" % (read, size), result)

        return result
|
|
|
|
|
|
|
|
    # Each method named open_<type> knows how to open that type of URL

    def open_http(self, url, data=None):
        """Use HTTP protocol.

        url is either a selector string, or a (proxyhost, fullurl) tuple
        produced by open() when a proxy is configured.  data, if not
        None, makes this a POST with an urlencoded body.
        """
        import httplib
        user_passwd = None
        proxy_passwd= None
        if isinstance(url, str):
            host, selector = splithost(url)
            if host:
                user_passwd, host = splituser(host)
                host = unquote(host)
            realhost = host
        else:
            # Proxy case: connect to the proxy host, send the full URL.
            host, selector = url
            # check whether the proxy contains authorization information
            proxy_passwd, host = splituser(host)
            # now we proceed with the url we want to obtain
            urltype, rest = splittype(selector)
            url = rest
            user_passwd = None
            if urltype.lower() != 'http':
                realhost = None
            else:
                realhost, rest = splithost(rest)
                if realhost:
                    user_passwd, realhost = splituser(realhost)
                if user_passwd:
                    selector = "%s://%s%s" % (urltype, realhost, rest)
                if proxy_bypass(realhost):
                    # Target host is exempt from proxying: go direct.
                    host = realhost

        #print "proxy via http:", host, selector
        if not host: raise IOError, ('http error', 'no host given')

        if proxy_passwd:
            proxy_passwd = unquote(proxy_passwd)
            # strip() drops the trailing newline b64encode may add.
            proxy_auth = base64.b64encode(proxy_passwd).strip()
        else:
            proxy_auth = None

        if user_passwd:
            user_passwd = unquote(user_passwd)
            auth = base64.b64encode(user_passwd).strip()
        else:
            auth = None
        h = httplib.HTTP(host)
        if data is not None:
            h.putrequest('POST', selector)
            h.putheader('Content-Type', 'application/x-www-form-urlencoded')
            h.putheader('Content-Length', '%d' % len(data))
        else:
            h.putrequest('GET', selector)
        if proxy_auth: h.putheader('Proxy-Authorization', 'Basic %s' % proxy_auth)
        if auth: h.putheader('Authorization', 'Basic %s' % auth)
        if realhost: h.putheader('Host', realhost)
        for args in self.addheaders: h.putheader(*args)
        h.endheaders(data)
        errcode, errmsg, headers = h.getreply()
        fp = h.getfile()
        if errcode == -1:
            if fp: fp.close()
            # something went wrong with the HTTP status line
            raise IOError, ('http protocol error', 0,
                            'got a bad status line', None)
        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        if (200 <= errcode < 300):
            return addinfourl(fp, headers, "http:" + url, errcode)
        else:
            if data is None:
                return self.http_error(url, fp, errcode, errmsg, headers)
            else:
                return self.http_error(url, fp, errcode, errmsg, headers, data)
|
1999-02-25 12:12:12 -04:00
|
|
|
|
|
|
|
def http_error(self, url, fp, errcode, errmsg, headers, data=None):
|
2000-02-04 11:28:42 -04:00
|
|
|
"""Handle http errors.
|
|
|
|
Derived class can override this, or provide specific handlers
|
|
|
|
named http_error_DDD where DDD is the 3-digit error code."""
|
1999-02-25 12:12:12 -04:00
|
|
|
# First check if there's a specific handler for this error
|
|
|
|
name = 'http_error_%d' % errcode
|
|
|
|
if hasattr(self, name):
|
|
|
|
method = getattr(self, name)
|
|
|
|
if data is None:
|
|
|
|
result = method(url, fp, errcode, errmsg, headers)
|
1999-02-25 12:14:58 -04:00
|
|
|
else:
|
|
|
|
result = method(url, fp, errcode, errmsg, headers, data)
|
1999-02-25 12:12:12 -04:00
|
|
|
if result: return result
|
1999-02-25 12:14:58 -04:00
|
|
|
return self.http_error_default(url, fp, errcode, errmsg, headers)
|
1999-02-25 12:12:12 -04:00
|
|
|
|
|
|
|
    def http_error_default(self, url, fp, errcode, errmsg, headers):
        """Default error handler: close the connection and raise IOError."""
        # Subclasses (e.g. FancyURLopener) override this to return the
        # error page instead of raising.
        fp.close()
        raise IOError, ('http error', errcode, errmsg, headers)
|
|
|
|
|
2007-08-29 19:35:05 -03:00
|
|
|
    if _have_ssl:
        def open_https(self, url, data=None):
            """Use HTTPS protocol.

            Mirrors open_http() but connects through httplib.HTTPS using
            the key_file/cert_file/context stored on this opener.
            Only defined when the ssl module imported successfully.
            """

            import httplib
            user_passwd = None
            proxy_passwd = None
            if isinstance(url, str):
                host, selector = splithost(url)
                if host:
                    user_passwd, host = splituser(host)
                    host = unquote(host)
                realhost = host
            else:
                # Proxy case: (proxyhost, fullurl) tuple from open().
                host, selector = url
                # here, we determine, whether the proxy contains authorization information
                proxy_passwd, host = splituser(host)
                urltype, rest = splittype(selector)
                url = rest
                user_passwd = None
                if urltype.lower() != 'https':
                    realhost = None
                else:
                    realhost, rest = splithost(rest)
                    if realhost:
                        user_passwd, realhost = splituser(realhost)
                    if user_passwd:
                        selector = "%s://%s%s" % (urltype, realhost, rest)
                #print "proxy via https:", host, selector
            if not host: raise IOError, ('https error', 'no host given')
            if proxy_passwd:
                proxy_passwd = unquote(proxy_passwd)
                # strip() drops the trailing newline b64encode may add.
                proxy_auth = base64.b64encode(proxy_passwd).strip()
            else:
                proxy_auth = None
            if user_passwd:
                user_passwd = unquote(user_passwd)
                auth = base64.b64encode(user_passwd).strip()
            else:
                auth = None
            h = httplib.HTTPS(host, 0,
                              key_file=self.key_file,
                              cert_file=self.cert_file,
                              context=self.context)
            if data is not None:
                h.putrequest('POST', selector)
                h.putheader('Content-Type',
                            'application/x-www-form-urlencoded')
                h.putheader('Content-Length', '%d' % len(data))
            else:
                h.putrequest('GET', selector)
            if proxy_auth: h.putheader('Proxy-Authorization', 'Basic %s' % proxy_auth)
            if auth: h.putheader('Authorization', 'Basic %s' % auth)
            if realhost: h.putheader('Host', realhost)
            for args in self.addheaders: h.putheader(*args)
            h.endheaders(data)
            errcode, errmsg, headers = h.getreply()
            fp = h.getfile()
            if errcode == -1:
                if fp: fp.close()
                # something went wrong with the HTTP status line
                raise IOError, ('http protocol error', 0,
                                'got a bad status line', None)
            # According to RFC 2616, "2xx" code indicates that the client's
            # request was successfully received, understood, and accepted.
            if (200 <= errcode < 300):
                return addinfourl(fp, headers, "https:" + url, errcode)
            else:
                if data is None:
                    return self.http_error(url, fp, errcode, errmsg, headers)
                else:
                    return self.http_error(url, fp, errcode, errmsg, headers,
                                           data)
|
2000-08-21 18:42:42 -03:00
|
|
|
|
1999-02-25 12:12:12 -04:00
|
|
|
def open_file(self, url):
|
2006-04-09 01:00:49 -03:00
|
|
|
"""Use local file or FTP depending on form of URL."""
|
2006-01-24 11:51:21 -04:00
|
|
|
if not isinstance(url, str):
|
|
|
|
raise IOError, ('file error', 'proxy support for file protocol currently not implemented')
|
2002-09-12 17:14:04 -03:00
|
|
|
if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/':
|
1999-02-25 12:12:12 -04:00
|
|
|
return self.open_ftp(url)
|
|
|
|
else:
|
|
|
|
return self.open_local_file(url)
|
|
|
|
|
|
|
|
    def open_local_file(self, url):
        """Use local file.

        Builds synthetic Content-Type / Content-Length / Last-modified
        headers from the file's stat info so the result resembles an
        HTTP response.  Raises IOError when the host part of the URL
        does not name this machine.
        """
        import mimetypes, mimetools, email.utils
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        host, file = splithost(url)
        localname = url2pathname(file)
        try:
            stats = os.stat(localname)
        except OSError, e:
            # Normalize to IOError to match the other open_* handlers.
            raise IOError(e.errno, e.strerror, e.filename)
        size = stats.st_size
        modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
        mtype = mimetypes.guess_type(url)[0]
        headers = mimetools.Message(StringIO(
            'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
            (mtype or 'text/plain', size, modified)))
        if not host:
            urlfile = file
            if file[:1] == '/':
                urlfile = 'file://' + file
            elif file[:2] == './':
                raise ValueError("local file url may start with / or file:. Unknown url of type: %s" % url)
            return addinfourl(open(localname, 'rb'),
                              headers, urlfile)
        host, port = splitport(host)
        if not port \
           and socket.gethostbyname(host) in (localhost(), thishost()):
            urlfile = file
            if file[:1] == '/':
                urlfile = 'file://' + file
            return addinfourl(open(localname, 'rb'),
                              headers, urlfile)
        raise IOError, ('local file error', 'not on local host')
|
|
|
|
|
|
|
|
def open_ftp(self, url):
|
2000-02-04 11:28:42 -04:00
|
|
|
"""Use FTP protocol."""
|
2006-01-24 11:51:21 -04:00
|
|
|
if not isinstance(url, str):
|
|
|
|
raise IOError, ('ftp error', 'proxy support for ftp protocol currently not implemented')
|
2004-12-31 15:15:26 -04:00
|
|
|
import mimetypes, mimetools
|
|
|
|
try:
|
|
|
|
from cStringIO import StringIO
|
|
|
|
except ImportError:
|
|
|
|
from StringIO import StringIO
|
1999-02-25 12:12:12 -04:00
|
|
|
host, path = splithost(url)
|
|
|
|
if not host: raise IOError, ('ftp error', 'no host given')
|
|
|
|
host, port = splitport(host)
|
|
|
|
user, host = splituser(host)
|
|
|
|
if user: user, passwd = splitpasswd(user)
|
|
|
|
else: passwd = None
|
|
|
|
host = unquote(host)
|
2010-11-20 07:24:08 -04:00
|
|
|
user = user or ''
|
|
|
|
passwd = passwd or ''
|
1999-02-25 12:12:12 -04:00
|
|
|
host = socket.gethostbyname(host)
|
|
|
|
if not port:
|
|
|
|
import ftplib
|
|
|
|
port = ftplib.FTP_PORT
|
|
|
|
else:
|
|
|
|
port = int(port)
|
|
|
|
path, attrs = splitattr(path)
|
|
|
|
path = unquote(path)
|
2000-12-15 11:01:37 -04:00
|
|
|
dirs = path.split('/')
|
1999-02-25 12:12:12 -04:00
|
|
|
dirs, file = dirs[:-1], dirs[-1]
|
|
|
|
if dirs and not dirs[0]: dirs = dirs[1:]
|
1999-08-18 14:40:33 -03:00
|
|
|
if dirs and not dirs[0]: dirs[0] = '/'
|
2000-12-15 11:01:37 -04:00
|
|
|
key = user, host, port, '/'.join(dirs)
|
1999-02-25 12:12:12 -04:00
|
|
|
# XXX thread unsafe!
|
|
|
|
if len(self.ftpcache) > MAXFTPCACHE:
|
|
|
|
# Prune the cache, rather arbitrarily
|
|
|
|
for k in self.ftpcache.keys():
|
|
|
|
if k != key:
|
|
|
|
v = self.ftpcache[k]
|
|
|
|
del self.ftpcache[k]
|
|
|
|
v.close()
|
|
|
|
try:
|
2002-06-01 11:18:47 -03:00
|
|
|
if not key in self.ftpcache:
|
1999-02-25 12:12:12 -04:00
|
|
|
self.ftpcache[key] = \
|
|
|
|
ftpwrapper(user, passwd, host, port, dirs)
|
|
|
|
if not file: type = 'D'
|
|
|
|
else: type = 'I'
|
|
|
|
for attr in attrs:
|
|
|
|
attr, value = splitvalue(attr)
|
2000-12-15 11:01:37 -04:00
|
|
|
if attr.lower() == 'type' and \
|
1999-02-25 12:12:12 -04:00
|
|
|
value in ('a', 'A', 'i', 'I', 'd', 'D'):
|
2000-12-15 11:01:37 -04:00
|
|
|
type = value.upper()
|
1999-02-25 12:12:12 -04:00
|
|
|
(fp, retrlen) = self.ftpcache[key].retrfile(file, type)
|
2001-08-23 10:38:15 -03:00
|
|
|
mtype = mimetypes.guess_type("ftp:" + url)[0]
|
|
|
|
headers = ""
|
|
|
|
if mtype:
|
|
|
|
headers += "Content-Type: %s\n" % mtype
|
1999-02-25 12:12:12 -04:00
|
|
|
if retrlen is not None and retrlen >= 0:
|
2001-08-23 10:38:15 -03:00
|
|
|
headers += "Content-Length: %d\n" % retrlen
|
2004-12-31 15:15:26 -04:00
|
|
|
headers = mimetools.Message(StringIO(headers))
|
1999-02-25 12:12:12 -04:00
|
|
|
return addinfourl(fp, headers, "ftp:" + url)
|
|
|
|
except ftperrors(), msg:
|
|
|
|
raise IOError, ('ftp error', msg), sys.exc_info()[2]
|
|
|
|
|
|
|
|
    def open_data(self, url, data=None):
        """Use "data" URL.

        Decodes an RFC 2397 data URL and returns a file-like response
        with synthetic Date/Content-type/Content-Length headers.
        """
        if not isinstance(url, str):
            raise IOError, ('data error', 'proxy support for data protocol currently not implemented')
        # ignore POSTed data
        #
        # syntax of data URLs:
        # dataurl   := "data:" [ mediatype ] [ ";base64" ] "," data
        # mediatype := [ type "/" subtype ] *( ";" parameter )
        # data      := *urlchar
        # parameter := attribute "=" value
        import mimetools
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        try:
            [type, data] = url.split(',', 1)
        except ValueError:
            raise IOError, ('data error', 'bad data URL')
        if not type:
            type = 'text/plain;charset=US-ASCII'
        # A trailing ';token' without '=' is the transfer encoding
        # (e.g. ';base64'), not a mediatype parameter.
        semi = type.rfind(';')
        if semi >= 0 and '=' not in type[semi:]:
            encoding = type[semi+1:]
            type = type[:semi]
        else:
            encoding = ''
        # Build a synthetic MIME header block followed by the payload.
        msg = []
        msg.append('Date: %s'%time.strftime('%a, %d %b %Y %H:%M:%S GMT',
                                            time.gmtime(time.time())))
        msg.append('Content-type: %s' % type)
        if encoding == 'base64':
            data = base64.decodestring(data)
        else:
            data = unquote(data)
        msg.append('Content-Length: %d' % len(data))
        msg.append('')
        msg.append(data)
        msg = '\n'.join(msg)
        f = StringIO(msg)
        headers = mimetools.Message(f, 0)
        #f.fileno = None     # needed for addinfourl
        return addinfourl(f, headers, url)
|
1998-03-12 10:32:55 -04:00
|
|
|
|
1994-03-22 08:05:32 -04:00
|
|
|
|
1995-08-04 01:29:05 -03:00
|
|
|
class FancyURLopener(URLopener):
    """Derived class with handlers for errors we can handle (perhaps)."""

    def __init__(self, *args, **kwargs):
        URLopener.__init__(self, *args, **kwargs)
        # Maps 'realm@host' -> (user, passwd) from earlier prompts.
        self.auth_cache = {}
        # Redirect-recursion guard: tries counts redirects in the current
        # open() chain; maxtries caps them (0 disables the cap).
        self.tries = 0
        self.maxtries = 10

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        """Default error handling -- don't raise an exception."""
        return addinfourl(fp, headers, "http:" + url, errcode)

    def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 302 -- relocated (temporarily)."""
        self.tries += 1
        try:
            if self.maxtries and self.tries >= self.maxtries:
                # Too many redirects: surface it as a synthetic 500
                # rather than recursing forever.
                if hasattr(self, "http_error_500"):
                    meth = self.http_error_500
                else:
                    meth = self.http_error_default
                return meth(url, fp, 500,
                            "Internal Server Error: Redirect Recursion",
                            headers)
            result = self.redirect_internal(url, fp, errcode, errmsg,
                                            headers, data)
            return result
        finally:
            # Reset the counter once the whole redirect chain unwinds.
            self.tries = 0

    def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
        """Follow a redirect to the URL named in Location: or URI:.

        Returns None (no redirect) when neither header is present.
        """
        if 'location' in headers:
            newurl = headers['location']
        elif 'uri' in headers:
            newurl = headers['uri']
        else:
            return
        fp.close()
        # In case the server sent a relative URL, join with original:
        newurl = basejoin(self.type + ":" + url, newurl)

        # For security reasons we do not allow redirects to protocols
        # other than HTTP, HTTPS or FTP.
        newurl_lower = newurl.lower()
        if not (newurl_lower.startswith('http://') or
                newurl_lower.startswith('https://') or
                newurl_lower.startswith('ftp://')):
            raise IOError('redirect error', errcode,
                          errmsg + " - Redirection to url '%s' is not allowed" %
                          newurl,
                          headers)

        return self.open(newurl)

    def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 301 -- also relocated (permanently)."""
        return self.http_error_302(url, fp, errcode, errmsg, headers, data)

    def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 303 -- also relocated (essentially identical to 302)."""
        return self.http_error_302(url, fp, errcode, errmsg, headers, data)

    def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 307 -- relocated, but turn POST into error."""
        if data is None:
            return self.http_error_302(url, fp, errcode, errmsg, headers, data)
        else:
            # 307 must not silently re-POST; fall back to default handling.
            return self.http_error_default(url, fp, errcode, errmsg, headers)

    def http_error_401(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 401 -- authentication required.
        This function supports Basic authentication only."""
        # NOTE(review): the three URLopener.http_error_default calls below
        # are not returned, so execution appears to continue past them --
        # preserved as-is; confirm against upstream before changing.
        if not 'www-authenticate' in headers:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        stuff = headers['www-authenticate']
        import re
        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
        if not match:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        scheme, realm = match.groups()
        if scheme.lower() != 'basic':
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        # Dispatch to retry_http_basic_auth / retry_https_basic_auth etc.
        name = 'retry_' + self.type + '_basic_auth'
        if data is None:
            return getattr(self,name)(url, realm)
        else:
            return getattr(self,name)(url, realm, data)

    def http_error_407(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 407 -- proxy authentication required.
        This function supports Basic authentication only."""
        if not 'proxy-authenticate' in headers:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        stuff = headers['proxy-authenticate']
        import re
        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
        if not match:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        scheme, realm = match.groups()
        if scheme.lower() != 'basic':
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        name = 'retry_proxy_' + self.type + '_basic_auth'
        if data is None:
            return getattr(self,name)(url, realm)
        else:
            return getattr(self,name)(url, realm, data)

    def retry_proxy_http_basic_auth(self, url, realm, data=None):
        """Re-issue the request with Basic credentials on the HTTP proxy."""
        host, selector = splithost(url)
        newurl = 'http://' + host + selector
        proxy = self.proxies['http']
        urltype, proxyhost = splittype(proxy)
        proxyhost, proxyselector = splithost(proxyhost)
        # Strip any user:pass@ already embedded in the proxy URL.
        i = proxyhost.find('@') + 1
        proxyhost = proxyhost[i:]
        user, passwd = self.get_user_passwd(proxyhost, realm, i)
        if not (user or passwd): return None
        proxyhost = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + proxyhost
        self.proxies['http'] = 'http://' + proxyhost + proxyselector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_proxy_https_basic_auth(self, url, realm, data=None):
        """Re-issue the request with Basic credentials on the HTTPS proxy."""
        host, selector = splithost(url)
        newurl = 'https://' + host + selector
        proxy = self.proxies['https']
        urltype, proxyhost = splittype(proxy)
        proxyhost, proxyselector = splithost(proxyhost)
        i = proxyhost.find('@') + 1
        proxyhost = proxyhost[i:]
        user, passwd = self.get_user_passwd(proxyhost, realm, i)
        if not (user or passwd): return None
        proxyhost = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + proxyhost
        self.proxies['https'] = 'https://' + proxyhost + proxyselector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_http_basic_auth(self, url, realm, data=None):
        """Re-open the http URL with user:pass@ embedded in the host part."""
        host, selector = splithost(url)
        i = host.find('@') + 1
        host = host[i:]
        user, passwd = self.get_user_passwd(host, realm, i)
        if not (user or passwd): return None
        host = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + host
        newurl = 'http://' + host + selector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_https_basic_auth(self, url, realm, data=None):
        """Re-open the https URL with user:pass@ embedded in the host part."""
        host, selector = splithost(url)
        i = host.find('@') + 1
        host = host[i:]
        user, passwd = self.get_user_passwd(host, realm, i)
        if not (user or passwd): return None
        host = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + host
        newurl = 'https://' + host + selector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def get_user_passwd(self, host, realm, clear_cache=0):
        """Return (user, passwd) for realm@host, prompting if not cached.

        A truthy clear_cache drops any cached entry first (used after a
        rejected attempt).
        """
        key = realm + '@' + host.lower()
        if key in self.auth_cache:
            if clear_cache:
                del self.auth_cache[key]
            else:
                return self.auth_cache[key]
        user, passwd = self.prompt_user_passwd(host, realm)
        if user or passwd: self.auth_cache[key] = (user, passwd)
        return user, passwd

    def prompt_user_passwd(self, host, realm):
        """Override this in a GUI environment!"""
        import getpass
        try:
            user = raw_input("Enter username for %s at %s: " % (realm,
                                                                host))
            passwd = getpass.getpass("Enter password for %s in %s at %s: " %
                (user, realm, host))
            return user, passwd
        except KeyboardInterrupt:
            # Newline so the ^C doesn't mangle the next prompt line.
            print
            return None, None
|
1995-08-04 01:29:05 -03:00
|
|
|
|
|
|
|
|
1994-03-22 08:05:32 -04:00
|
|
|
# Utility functions
|
|
|
|
|
|
|
|
_localhost = None
def localhost():
    """Return the IP address of the magic hostname 'localhost'."""
    global _localhost
    if _localhost is not None:
        # Already resolved once; reuse the cached address.
        return _localhost
    _localhost = socket.gethostbyname('localhost')
    return _localhost
|
1994-03-22 08:05:32 -04:00
|
|
|
|
|
|
|
_thishost = None
def thishost():
    """Return the IP address of the current host."""
    global _thishost
    if _thishost is not None:
        # Cached from a previous call.
        return _thishost
    try:
        _thishost = socket.gethostbyname(socket.gethostname())
    except socket.gaierror:
        # Hostname doesn't resolve (common on misconfigured boxes);
        # fall back to the loopback address.
        _thishost = socket.gethostbyname('localhost')
    return _thishost
|
1994-03-22 08:05:32 -04:00
|
|
|
|
|
|
|
_ftperrors = None
def ftperrors():
    """Return the set of errors raised by the FTP class."""
    global _ftperrors
    if _ftperrors is not None:
        return _ftperrors
    # Import lazily so the module loads without ftplib present.
    import ftplib
    _ftperrors = ftplib.all_errors
    return _ftperrors
|
1994-03-22 08:05:32 -04:00
|
|
|
|
|
|
|
_noheaders = None
def noheaders():
    """Return an empty mimetools.Message object.

    The object is created once and cached in _noheaders; all callers
    share the same instance.
    """
    global _noheaders
    if _noheaders is None:
        import mimetools
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        # An empty stream parses to a Message with no headers at all.
        _noheaders = mimetools.Message(StringIO(), 0)
        _noheaders.fp.close()   # Recycle file descriptor
    return _noheaders
|
1994-03-22 08:05:32 -04:00
|
|
|
|
|
|
|
|
|
|
|
# Utility classes
|
|
|
|
|
|
|
|
class ftpwrapper:
    """Class used by open_ftp() for cache of open FTP connections."""

    def __init__(self, user, passwd, host, port, dirs,
                 timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                 persistent=True):
        self.user = user
        self.passwd = passwd
        self.host = host
        self.port = port
        # dirs: sequence of path components to cwd into after login.
        self.dirs = dirs
        self.timeout = timeout
        # Number of file objects handed out that are still open.
        self.refcount = 0
        # When False, the connection is torn down once the last
        # outstanding file object is closed.
        self.keepalive = persistent
        try:
            self.init()
        except:
            # Don't leak a half-open connection if login/cwd fails.
            self.close()
            raise

    def init(self):
        """(Re)connect, log in and change to the target directory."""
        import ftplib
        self.busy = 0
        self.ftp = ftplib.FTP()
        self.ftp.connect(self.host, self.port, self.timeout)
        self.ftp.login(self.user, self.passwd)
        _target = '/'.join(self.dirs)
        self.ftp.cwd(_target)

    def retrfile(self, file, type):
        """Retrieve *file* (or a directory listing) from the server.

        type 'd'/'D' forces a directory listing; any other value is sent
        as the FTP TYPE.  Returns (file-like object, length or None).
        """
        import ftplib
        self.endtransfer()
        if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
        else: cmd = 'TYPE ' + type; isdir = 0
        try:
            self.ftp.voidcmd(cmd)
        except ftplib.all_errors:
            # Connection probably timed out; reconnect and retry once.
            self.init()
            self.ftp.voidcmd(cmd)
        conn = None
        if file and not isdir:
            # Try to retrieve as a file
            try:
                cmd = 'RETR ' + file
                conn, retrlen = self.ftp.ntransfercmd(cmd)
            except ftplib.error_perm, reason:
                # 550 means "not a plain file" -- fall through to the
                # directory-listing path below; anything else is fatal.
                if str(reason)[:3] != '550':
                    raise IOError, ('ftp error', reason), sys.exc_info()[2]
        if not conn:
            # Set transfer mode to ASCII!
            self.ftp.voidcmd('TYPE A')
            # Try a directory listing. Verify that directory exists.
            if file:
                pwd = self.ftp.pwd()
                try:
                    try:
                        self.ftp.cwd(file)
                    except ftplib.error_perm, reason:
                        raise IOError, ('ftp error', reason), sys.exc_info()[2]
                finally:
                    # Always restore the original working directory.
                    self.ftp.cwd(pwd)
                cmd = 'LIST ' + file
            else:
                cmd = 'LIST'
            conn, retrlen = self.ftp.ntransfercmd(cmd)
        self.busy = 1
        # file_close drops the refcount (and may close the connection).
        ftpobj = addclosehook(conn.makefile('rb'), self.file_close)
        self.refcount += 1
        conn.close()
        # Pass back both a suitably decorated object and a retrieval length
        return (ftpobj, retrlen)

    def endtransfer(self):
        """Mark the current transfer as finished."""
        self.busy = 0

    def close(self):
        """Disable keepalive; close now unless files are still open."""
        self.keepalive = False
        if self.refcount <= 0:
            self.real_close()

    def file_close(self):
        """Hook called when a handed-out file object is closed."""
        self.endtransfer()
        self.refcount -= 1
        if self.refcount <= 0 and not self.keepalive:
            self.real_close()

    def real_close(self):
        """Actually shut down the FTP connection, ignoring FTP errors."""
        self.endtransfer()
        try:
            self.ftp.close()
        except ftperrors():
            pass
|
1994-03-22 08:05:32 -04:00
|
|
|
|
|
|
|
class addbase:
    """Base class for addinfo and addclosehook."""

    def __init__(self, fp):
        # Delegate the file protocol straight through to the wrapped
        # object by binding its bound methods as our attributes.
        self.fp = fp
        self.read = self.fp.read
        self.readline = self.fp.readline
        if hasattr(self.fp, "readlines"):
            self.readlines = self.fp.readlines
        if hasattr(self.fp, "fileno"):
            self.fileno = self.fp.fileno
        else:
            # Some wrapped objects (e.g. StringIO) have no descriptor.
            self.fileno = lambda: None
        if hasattr(self.fp, "__iter__"):
            self.__iter__ = self.fp.__iter__
            if hasattr(self.fp, "next"):
                self.next = self.fp.next

    def __repr__(self):
        return '<%s at %r whose fp = %r>' % (self.__class__.__name__,
                                             id(self), self.fp)

    def close(self):
        """Drop the delegated methods and close the wrapped object."""
        self.read = self.readline = self.readlines = self.fileno = None
        if self.fp:
            self.fp.close()
        self.fp = None
|
1994-03-22 08:05:32 -04:00
|
|
|
|
|
|
|
class addclosehook(addbase):
    """Class to add a close hook to an open file."""

    def __init__(self, fp, closehook, *hookargs):
        addbase.__init__(self, fp)
        # Callable invoked (with hookargs) exactly once, on first close.
        self.closehook = closehook
        self.hookargs = hookargs

    def close(self):
        """Run the hook once, then always close the underlying file."""
        try:
            hook = self.closehook
            args = self.hookargs
            if hook:
                # Clear first so a re-entrant close can't fire it twice.
                self.closehook = None
                self.hookargs = None
                hook(*args)
        finally:
            addbase.close(self)
|
|
|
|
|
1994-03-22 08:05:32 -04:00
|
|
|
|
|
|
|
class addinfo(addbase):
    """class to add an info() method to an open file."""

    def __init__(self, fp, headers):
        addbase.__init__(self, fp)
        # Headers object (e.g. a mimetools.Message) returned by info().
        self.headers = headers

    def info(self):
        """Return the stored headers object."""
        return self.headers
|
1994-03-22 08:05:32 -04:00
|
|
|
|
1996-09-10 14:02:56 -03:00
|
|
|
class addinfourl(addbase):
    """class to add info() and geturl() methods to an open file."""

    def __init__(self, fp, headers, url, code=None):
        addbase.__init__(self, fp)
        # Response metadata exposed through the accessors below.
        self.headers = headers
        self.url = url
        self.code = code

    def info(self):
        """Return the response headers object."""
        return self.headers

    def getcode(self):
        """Return the HTTP status code, or None when not applicable."""
        return self.code

    def geturl(self):
        """Return the URL this response was retrieved from."""
        return self.url
|
1996-09-10 14:02:56 -03:00
|
|
|
|
1994-03-22 08:05:32 -04:00
|
|
|
|
1994-07-04 19:14:49 -03:00
|
|
|
# Utilities to parse URLs (most of these return None for missing parts):
|
1995-11-10 06:36:07 -04:00
|
|
|
# unwrap('<URL:type://host/path>') --> 'type://host/path'
|
1994-03-22 08:05:32 -04:00
|
|
|
# splittype('type:opaquestring') --> 'type', 'opaquestring'
|
|
|
|
# splithost('//host[:port]/path') --> 'host[:port]', '/path'
|
1994-07-04 19:14:49 -03:00
|
|
|
# splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'
|
|
|
|
# splitpasswd('user:passwd') -> 'user', 'passwd'
|
1994-03-22 08:05:32 -04:00
|
|
|
# splitport('host:port') --> 'host', 'port'
|
|
|
|
# splitquery('/path?query') --> '/path', 'query'
|
|
|
|
# splittag('/path#tag') --> '/path', 'tag'
|
1994-07-04 19:14:49 -03:00
|
|
|
# splitattr('/path;attr1=value1;attr2=value2;...') ->
|
|
|
|
# '/path', ['attr1=value1', 'attr2=value2', ...]
|
|
|
|
# splitvalue('attr=value') --> 'attr', 'value'
|
1994-03-22 08:05:32 -04:00
|
|
|
# unquote('abc%20def') -> 'abc def'
|
|
|
|
# quote('abc def') -> 'abc%20def'
|
|
|
|
|
Remove uses of the string and types modules:
x in string.whitespace => x.isspace()
type(x) in types.StringTypes => isinstance(x, basestring)
isinstance(x, types.StringTypes) => isinstance(x, basestring)
type(x) is types.StringType => isinstance(x, str)
type(x) == types.StringType => isinstance(x, str)
string.split(x, ...) => x.split(...)
string.join(x, y) => y.join(x)
string.zfill(x, ...) => x.zfill(...)
string.count(x, ...) => x.count(...)
hasattr(types, "UnicodeType") => try: unicode except NameError:
type(x) != types.TupleTuple => not isinstance(x, tuple)
isinstance(x, types.TupleType) => isinstance(x, tuple)
type(x) is types.IntType => isinstance(x, int)
Do not mention the string module in the rlcompleter docstring.
This partially applies SF patch http://www.python.org/sf/562373
(with basestring instead of string). (It excludes the changes to
unittest.py and does not change the os.stat stuff.)
2002-06-03 12:58:32 -03:00
|
|
|
# Define _is_unicode(x) depending on whether this interpreter has the
# 'unicode' builtin; the else-branch closure must only be created when
# the name exists, so the definition order here is significant.
try:
    unicode
except NameError:
    def _is_unicode(x):
        # No unicode type at all: nothing can be a unicode string.
        return 0
else:
    def _is_unicode(x):
        return isinstance(x, unicode)
|
2002-05-24 14:58:05 -03:00
|
|
|
|
2000-12-03 14:30:10 -04:00
|
|
|
def toBytes(url):
    """toBytes(u"URL") --> 'URL'."""
    # Most URL schemes require ASCII. If that changes, the conversion
    # can be relaxed
    if not _is_unicode(url):
        return url
    try:
        return url.encode("ASCII")
    except UnicodeError:
        raise UnicodeError("URL " + repr(url) +
                           " contains non-ASCII characters")
|
|
|
|
|
1994-03-22 08:05:32 -04:00
|
|
|
def unwrap(url):
    """unwrap('<URL:type://host/path>') --> 'type://host/path'."""
    url = url.strip()
    if url.startswith('<') and url.endswith('>'):
        # Drop the angle brackets, then any optional 'URL:' prefix.
        url = url[1:-1].strip()
        if url[:4] == 'URL:':
            url = url[4:].strip()
    return url
|
1994-03-22 08:05:32 -04:00
|
|
|
|
1997-09-29 20:23:46 -03:00
|
|
|
_typeprog = None
def splittype(url):
    """splittype('type:opaquestring') --> 'type', 'opaquestring'."""
    global _typeprog
    if _typeprog is None:
        # Compile lazily on first use.
        import re
        _typeprog = re.compile('^([^/:]+):')

    found = _typeprog.match(url)
    if not found:
        return None, url
    scheme = found.group(1)
    # Scheme names are case-insensitive; normalize to lower case.
    return scheme.lower(), url[len(scheme) + 1:]
|
1994-03-22 08:05:32 -04:00
|
|
|
|
1997-09-29 20:23:46 -03:00
|
|
|
_hostprog = None
def splithost(url):
    """splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
    global _hostprog
    if _hostprog is None:
        import re
        _hostprog = re.compile('^//([^/?]*)(.*)$')

    found = _hostprog.match(url)
    if not found:
        return None, url
    host_port, path = found.group(1, 2)
    if path and not path.startswith('/'):
        # Path beginning with '?' (bare query): force a leading slash.
        path = '/' + path
    return host_port, path
|
1994-03-22 08:05:32 -04:00
|
|
|
|
1997-09-29 20:23:46 -03:00
|
|
|
_userprog = None
def splituser(host):
    """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
    global _userprog
    if _userprog is None:
        import re
        # Greedy '.*' means we split at the LAST '@'.
        _userprog = re.compile('^(.*)@(.*)$')

    found = _userprog.match(host)
    if found is None:
        return None, host
    return found.group(1, 2)
|
1994-07-04 19:14:49 -03:00
|
|
|
|
1997-09-29 20:23:46 -03:00
|
|
|
_passwdprog = None
def splitpasswd(user):
    """splitpasswd('user:passwd') -> 'user', 'passwd'."""
    global _passwdprog
    if _passwdprog is None:
        import re
        # re.S lets the password part contain newlines.
        _passwdprog = re.compile('^([^:]*):(.*)$',re.S)

    found = _passwdprog.match(user)
    if found is None:
        return user, None
    return found.group(1, 2)
|
1994-07-04 19:14:49 -03:00
|
|
|
|
2000-02-04 11:28:42 -04:00
|
|
|
# splitport('host:port') --> 'host', 'port'
|
1997-09-29 20:23:46 -03:00
|
|
|
_portprog = None
|
1994-03-22 08:05:32 -04:00
|
|
|
def splitport(host):
|
2000-02-04 11:28:42 -04:00
|
|
|
"""splitport('host:port') --> 'host', 'port'."""
|
1999-02-25 12:12:12 -04:00
|
|
|
global _portprog
|
|
|
|
if _portprog is None:
|
|
|
|
import re
|
2014-01-18 12:30:09 -04:00
|
|
|
_portprog = re.compile('^(.*):([0-9]*)$')
|
1997-09-29 20:23:46 -03:00
|
|
|
|
1999-02-25 12:12:12 -04:00
|
|
|
match = _portprog.match(host)
|
2014-01-18 12:30:09 -04:00
|
|
|
if match:
|
|
|
|
host, port = match.groups()
|
|
|
|
if port:
|
|
|
|
return host, port
|
1999-02-25 12:12:12 -04:00
|
|
|
return host, None
|
1994-03-22 08:05:32 -04:00
|
|
|
|
1997-09-29 20:23:46 -03:00
|
|
|
_nportprog = None
def splitnport(host, defport=-1):
    """Split host and port, returning numeric port.
    Return given default port if no ':' found; defaults to -1.
    Return numerical port if a valid number are found after ':'.
    Return None if ':' but not a valid number."""
    global _nportprog
    if _nportprog is None:
        import re
        _nportprog = re.compile('^(.*):(.*)$')

    found = _nportprog.match(host)
    if found:
        host, port = found.group(1, 2)
        if port:
            try:
                nport = int(port)
            except ValueError:
                # ':' present but the port isn't numeric.
                nport = None
            return host, nport
    # No ':' (or an empty port part): report the default.
    return host, defport
|
1996-06-13 16:12:35 -03:00
|
|
|
|
1997-09-29 20:23:46 -03:00
|
|
|
_queryprog = None
def splitquery(url):
    """splitquery('/path?query') --> '/path', 'query'."""
    global _queryprog
    if _queryprog is None:
        import re
        # Greedy prefix: split at the LAST '?'.
        _queryprog = re.compile('^(.*)\?([^?]*)$')

    found = _queryprog.match(url)
    if found is None:
        return url, None
    return found.group(1, 2)
|
1994-03-22 08:05:32 -04:00
|
|
|
|
1997-09-29 20:23:46 -03:00
|
|
|
_tagprog = None
def splittag(url):
    """splittag('/path#tag') --> '/path', 'tag'."""
    global _tagprog
    if _tagprog is None:
        import re
        # Greedy prefix: split at the LAST '#'.
        _tagprog = re.compile('^(.*)#([^#]*)$')

    found = _tagprog.match(url)
    if found is None:
        return url, None
    return found.group(1, 2)
|
1994-03-22 08:05:32 -04:00
|
|
|
|
1994-07-04 19:14:49 -03:00
|
|
|
def splitattr(url):
    """splitattr('/path;attr1=value1;attr2=value2;...') ->
        '/path', ['attr1=value1', 'attr2=value2', ...]."""
    # Everything before the first ';' is the path; the rest are attrs.
    parts = url.split(';')
    return parts[0], parts[1:]
|
1994-07-04 19:14:49 -03:00
|
|
|
|
1997-09-29 20:23:46 -03:00
|
|
|
_valueprog = None
def splitvalue(attr):
    """splitvalue('attr=value') --> 'attr', 'value'."""
    global _valueprog
    if _valueprog is None:
        import re
        # Non-greedy attr part: split at the FIRST '='.
        _valueprog = re.compile('^([^=]*)=(.*)$')

    found = _valueprog.match(attr)
    if found is None:
        return attr, None
    return found.group(1, 2)
|
1994-07-04 19:14:49 -03:00
|
|
|
|
2010-05-25 12:20:46 -03:00
|
|
|
# urlparse contains a duplicate of this method to avoid a circular import. If
|
|
|
|
# you update this method, also update the copy in urlparse. This code
|
|
|
|
# duplication does not exist in Python3.
|
|
|
|
|
2010-03-18 09:14:15 -03:00
|
|
|
_hexdig = '0123456789ABCDEFabcdef'
|
2010-05-17 07:39:07 -03:00
|
|
|
_hextochr = dict((a + b, chr(int(a + b, 16)))
|
|
|
|
for a in _hexdig for b in _hexdig)
|
2013-03-14 16:31:09 -03:00
|
|
|
_asciire = re.compile('([\x00-\x7f]+)')
|
2005-09-10 03:49:04 -03:00
|
|
|
|
1994-03-22 08:05:32 -04:00
|
|
|
def unquote(s):
    """unquote('abc%20def') -> 'abc def'."""
    if _is_unicode(s):
        if '%' not in s:
            return s
        # Split into alternating ASCII / non-ASCII runs; unquote each
        # ASCII run as a byte string and re-decode it, pass the rest
        # through untouched.
        pieces = _asciire.split(s)
        out = [pieces[0]]
        for i in range(1, len(pieces), 2):
            out.append(unquote(str(pieces[i])).decode('latin1'))
            out.append(pieces[i + 1])
        return ''.join(out)

    segments = s.split('%')
    if len(segments) == 1:
        # fastpath: no escapes at all.
        return s
    out = [segments[0]]
    for seg in segments[1:]:
        try:
            # First two chars after '%' must be a valid hex pair.
            out.append(_hextochr[seg[:2]] + seg[2:])
        except KeyError:
            # Not a valid escape: keep the '%' literally.
            out.append('%' + seg)
    return ''.join(out)
|
1994-03-22 08:05:32 -04:00
|
|
|
|
1996-12-13 10:47:36 -04:00
|
|
|
def unquote_plus(s):
    """unquote('%7e/abc+def') -> '~/abc def'"""
    # '+' is the application/x-www-form-urlencoded spelling of a space.
    return unquote(s.replace('+', ' '))
|
1996-12-13 10:47:36 -04:00
|
|
|
|
2000-09-24 15:51:25 -03:00
|
|
|
# Characters that never need quoting in any URL component.
always_safe = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
               'abcdefghijklmnopqrstuvwxyz'
               '0123456789' '_.-')
# Map each of the 256 byte values to itself when always safe, otherwise to
# its '%XX' escape.  (Python 2: str(bytearray(...)) yields the raw bytes as
# a str, so iteration pairs each byte value with its one-char string.)
_safe_map = {}
for i, c in zip(xrange(256), str(bytearray(xrange(256)))):
    _safe_map[c] = c if (i < 128 and c in always_safe) else '%{:02X}'.format(i)
# Cache of (safe, always_safe) -> (quoter function, full safe string),
# filled lazily by quote().
_safe_quoters = {}
|
2000-09-14 13:59:07 -03:00
|
|
|
|
2010-07-21 22:47:30 -03:00
|
|
|
def quote(s, safe='/'):
    """quote('abc def') -> 'abc%20def'

    Each part of a URL, e.g. the path info, the query, etc., has a
    different set of reserved characters that must be quoted.

    RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists
    the following reserved characters.

    reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" |
               "$" | ","

    Each of these characters is reserved in some component of a URL,
    but not necessarily in all of them.

    By default, the quote function is intended for quoting the path
    section of a URL.  Thus, it will not encode '/'.  This character
    is reserved, but in typical usage the quote function is being
    called on a path where the existing slash characters are used as
    reserved characters.
    """
    # fastpath: the empty string needs no work, but reject None loudly.
    if not s:
        if s is None:
            raise TypeError('None object cannot be quoted')
        return s
    cachekey = (safe, always_safe)
    try:
        quoter, safe = _safe_quoters[cachekey]
    except KeyError:
        # Build a per-safe-set translation table once and cache it.
        char_map = _safe_map.copy()
        char_map.update([(ch, ch) for ch in safe])
        quoter = char_map.__getitem__
        safe = always_safe + safe
        _safe_quoters[cachekey] = (quoter, safe)
    # If every character is safe, return the input unchanged.
    if not s.rstrip(safe):
        return s
    return ''.join([quoter(ch) for ch in s])
|
1994-03-22 08:05:32 -04:00
|
|
|
|
2010-07-21 22:47:30 -03:00
|
|
|
def quote_plus(s, safe=''):
    """Quote the query fragment of a URL; replacing ' ' with '+'"""
    # No spaces: plain quote() suffices.
    if ' ' not in s:
        return quote(s, safe)
    # Keep spaces unescaped through quote(), then turn them into '+'.
    return quote(s, safe + ' ').replace(' ', '+')
|
1996-12-13 10:47:36 -04:00
|
|
|
|
2010-05-17 07:39:07 -03:00
|
|
|
def urlencode(query, doseq=0):
    """Encode a sequence of two-element tuples or dictionary into a URL query string.

    If any values in the query arg are sequences and doseq is true, each
    sequence element is converted to a separate parameter.

    If the query arg is a sequence of two-element tuples, the order of the
    parameters in the output will match the order of parameters in the
    input.

    Keys and values are passed through str() and quote_plus() before
    being joined as 'key=value' pairs separated by '&'.
    """

    if hasattr(query,"items"):
        # mapping objects
        query = query.items()
    else:
        # it's a bother at times that strings and string-like objects are
        # sequences...
        try:
            # non-sequence items should not work with len()
            # non-empty strings will fail this
            if len(query) and not isinstance(query[0], tuple):
                raise TypeError
            # zero-length sequences of all types will get here and succeed,
            # but that's a minor nit - since the original implementation
            # allowed empty dicts that type of behavior probably should be
            # preserved for consistency
        except TypeError:
            # Re-raise with the original traceback so the error points at
            # the caller's bad data rather than at this probe.
            ty,va,tb = sys.exc_info()
            raise TypeError, "not a valid non-string sequence or mapping object", tb

    l = []
    if not doseq:
        # preserve old behavior: every value is stringified whole, even
        # if it is a sequence.
        for k, v in query:
            k = quote_plus(str(k))
            v = quote_plus(str(v))
            l.append(k + '=' + v)
    else:
        for k, v in query:
            k = quote_plus(str(k))
            if isinstance(v, str):
                v = quote_plus(v)
                l.append(k + '=' + v)
            elif _is_unicode(v):
                # is there a reasonable way to convert to ASCII?
                # encode generates a string, but "replace" or "ignore"
                # lose information and "strict" can raise UnicodeError
                v = quote_plus(v.encode("ASCII","replace"))
                l.append(k + '=' + v)
            else:
                try:
                    # is this a sufficient test for sequence-ness?
                    len(v)
                except TypeError:
                    # not a sequence
                    v = quote_plus(str(v))
                    l.append(k + '=' + v)
                else:
                    # loop over the sequence, emitting one k=elt pair
                    # per element
                    for elt in v:
                        l.append(k + '=' + quote_plus(str(elt)))
    return '&'.join(l)
|
1998-07-22 18:33:23 -03:00
|
|
|
|
1996-03-20 11:33:11 -04:00
|
|
|
# Proxy handling
|
2000-07-26 04:04:38 -03:00
|
|
|
def getproxies_environment():
    """Return a dictionary of scheme -> proxy server URL mappings.

    Scan the environment for variables named <scheme>_proxy;
    this seems to be the standard convention.  In order to prefer lowercase
    variables, we process the environment in two passes, first matches any
    and second matches only lower case proxies.

    If you need a different way, you can pass a proxies dictionary to the
    [Fancy]URLopener constructor.
    """
    proxies = {}
    # Pass one: accept <scheme>_proxy in any letter case.
    for var, url in os.environ.items():
        var = var.lower()
        if url and var[-6:] == '_proxy':
            proxies[var[:-6]] = url

    # CVE-2016-1000110 - If we are running as CGI script, forget HTTP_PROXY
    # (non-all-lowercase) as it may be set from the web server by a "Proxy:"
    # header from the client
    # If "proxy" is lowercase, it will still be used thanks to the next block
    if 'REQUEST_METHOD' in os.environ:
        proxies.pop('http', None)

    # Pass two: variables whose suffix is already lowercase win; an empty
    # value explicitly unsets the scheme.
    for var, url in os.environ.items():
        if var[-6:] == '_proxy':
            var = var.lower()
            if url:
                proxies[var[:-6]] = url
            else:
                proxies.pop(var[:-6], None)

    return proxies
|
|
|
|
|
2016-04-25 13:17:54 -03:00
|
|
|
def proxy_bypass_environment(host, proxies=None):
    """Test if proxies should not be used for a particular host.

    Checks the proxies dict for the value of no_proxy, which should be a
    list of comma separated DNS suffixes, or '*' for all hosts.
    """
    if proxies is None:
        proxies = getproxies_environment()
    # No no_proxy entry at all: never bypass.
    try:
        no_proxy = proxies['no']
    except KeyError:
        return 0
    # '*' is the special "bypass for every host" marker.
    if no_proxy == '*':
        return 1
    # Compare without the port as well as with it.
    hostonly, port = splitport(host)
    # Bypass when the host matches (or is a subdomain of) any suffix.
    for suffix in (entry.strip() for entry in no_proxy.split(',')):
        if suffix:
            pattern = r'(.+\.)?%s$' % re.escape(suffix)
            if (re.match(pattern, hostonly, re.I)
                    or re.match(pattern, host, re.I)):
                return 1
    # otherwise, don't bypass
    return 0
|
|
|
|
|
|
|
|
|
2004-07-16 08:45:00 -03:00
|
|
|
if sys.platform == 'darwin':
|
2009-09-20 07:31:22 -03:00
|
|
|
from _scproxy import _get_proxy_settings, _get_proxies
|
2008-05-12 08:31:05 -03:00
|
|
|
|
|
|
|
    def proxy_bypass_macosx_sysconf(host):
        """
        Return True iff this host shouldn't be accessed using a proxy

        This function uses the MacOSX framework SystemConfiguration
        to fetch the proxy information.
        """
        import re
        import socket
        from fnmatch import fnmatch

        # Split off any ':port' so name/IP checks see the bare host.
        hostonly, port = splitport(host)

        def ip2num(ipAddr):
            # Convert a (possibly truncated) dotted-quad string such as
            # '169.254' to a 32-bit integer, zero-padding missing octets.
            # (Python 2: map() returns a list here.)
            parts = ipAddr.split('.')
            parts = map(int, parts)
            if len(parts) != 4:
                parts = (parts + [0, 0, 0, 0])[:4]
            return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]

        proxy_settings = _get_proxy_settings()

        # Check for simple host names:
        if '.' not in host:
            if proxy_settings['exclude_simple']:
                return True

        # Numeric form of the resolved host, computed lazily only when an
        # exception entry looks like an IP network.
        hostIP = None

        for value in proxy_settings.get('exceptions', ()):
            # Items in the list are strings like these: *.local, 169.254/16
            if not value: continue

            m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
            if m is not None:
                # Entry looks like an IP address or network (optional /prefix).
                if hostIP is None:
                    try:
                        hostIP = socket.gethostbyname(hostonly)
                        hostIP = ip2num(hostIP)
                    except socket.error:
                        continue

                base = ip2num(m.group(1))
                mask = m.group(2)
                if mask is None:
                    # No explicit prefix length: assume 8 bits per octet given.
                    mask = 8 * (m.group(1).count('.') + 1)

                else:
                    mask = int(mask[1:])
                # Number of low-order bits ignored in the comparison below.
                mask = 32 - mask

                if (hostIP >> mask) == (base >> mask):
                    return True

            # Otherwise treat the entry as a glob pattern (e.g. '*.local').
            elif fnmatch(host, value):
                return True

        return False
|
2008-05-12 08:31:05 -03:00
|
|
|
|
|
|
|
    def getproxies_macosx_sysconf():
        """Return a dictionary of scheme -> proxy server URL mappings.

        This function uses the MacOSX framework SystemConfiguration
        to fetch the proxy information.
        """
        # Delegates entirely to the _scproxy C extension.
        return _get_proxies()
|
2000-07-26 04:04:38 -03:00
|
|
|
|
2008-01-20 08:05:43 -04:00
|
|
|
def proxy_bypass(host):
|
2016-04-25 13:17:54 -03:00
|
|
|
"""Return True, if a host should be bypassed.
|
|
|
|
|
|
|
|
Checks proxy settings gathered from the environment, if specified, or
|
|
|
|
from the MacOSX framework SystemConfiguration.
|
|
|
|
"""
|
|
|
|
proxies = getproxies_environment()
|
|
|
|
if proxies:
|
|
|
|
return proxy_bypass_environment(host, proxies)
|
2008-01-20 08:05:43 -04:00
|
|
|
else:
|
2008-05-12 08:31:05 -03:00
|
|
|
return proxy_bypass_macosx_sysconf(host)
|
2001-08-09 15:04:14 -03:00
|
|
|
|
2004-07-16 08:45:00 -03:00
|
|
|
    def getproxies():
        """Return a scheme -> proxy URL mapping, preferring environment
        variables over the MacOSX SystemConfiguration settings."""
        return getproxies_environment() or getproxies_macosx_sysconf()
|
2004-07-18 03:16:08 -03:00
|
|
|
|
2000-07-26 04:04:38 -03:00
|
|
|
elif os.name == 'nt':
|
|
|
|
    def getproxies_registry():
        """Return a dictionary of scheme -> proxy server URL mappings.

        Win32 uses the registry to store proxies.

        """
        proxies = {}
        try:
            import _winreg
        except ImportError:
            # Std module, so should be around - but you never know!
            return proxies
        try:
            internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = _winreg.QueryValueEx(internetSettings,
                                               'ProxyEnable')[0]
            if proxyEnable:
                # Returned as Unicode but problems if not converted to ASCII
                proxyServer = str(_winreg.QueryValueEx(internetSettings,
                                                       'ProxyServer')[0])
                if '=' in proxyServer:
                    # Per-protocol settings
                    for p in proxyServer.split(';'):
                        protocol, address = p.split('=', 1)
                        # See if address has a type:// prefix
                        import re
                        if not re.match('^([^/:]+)://', address):
                            # Registry stores e.g. 'host:port'; prepend the
                            # protocol so the value is a usable URL.
                            address = '%s://%s' % (protocol, address)
                        proxies[protocol] = address
                else:
                    # Use one setting for all protocols
                    if proxyServer[:5] == 'http:':
                        proxies['http'] = proxyServer
                    else:
                        proxies['http'] = 'http://%s' % proxyServer
                        proxies['https'] = 'https://%s' % proxyServer
                        proxies['ftp'] = 'ftp://%s' % proxyServer
            internetSettings.Close()
        except (WindowsError, ValueError, TypeError):
            # Either registry key not found etc, or the value in an
            # unexpected format.
            # proxies already set up to be empty so nothing to do
            pass
        return proxies
|
1996-03-20 11:33:11 -04:00
|
|
|
|
2000-07-26 04:04:38 -03:00
|
|
|
def getproxies():
|
|
|
|
"""Return a dictionary of scheme -> proxy server URL mappings.
|
|
|
|
|
|
|
|
Returns settings gathered from the environment, if specified,
|
|
|
|
or the registry.
|
|
|
|
|
|
|
|
"""
|
|
|
|
return getproxies_environment() or getproxies_registry()
|
2001-08-09 15:04:14 -03:00
|
|
|
|
2008-01-20 08:05:43 -04:00
|
|
|
    def proxy_bypass_registry(host):
        """Return 1 if the registry's ProxyOverride list matches host,
        else 0."""
        try:
            import _winreg
            import re
        except ImportError:
            # Std modules, so should be around - but you never know!
            return 0
        try:
            internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = _winreg.QueryValueEx(internetSettings,
                                               'ProxyEnable')[0]
            proxyOverride = str(_winreg.QueryValueEx(internetSettings,
                                                     'ProxyOverride')[0])
            # ^^^^ Returned as Unicode but problems if not converted to ASCII
        except WindowsError:
            return 0
        if not proxyEnable or not proxyOverride:
            return 0
        # try to make a host list from name and IP address.
        rawHost, port = splitport(host)
        host = [rawHost]
        try:
            addr = socket.gethostbyname(rawHost)
            if addr != rawHost:
                host.append(addr)
        except socket.error:
            pass
        try:
            fqdn = socket.getfqdn(rawHost)
            if fqdn != rawHost:
                host.append(fqdn)
        except socket.error:
            pass
        # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        proxyOverride = proxyOverride.split(';')
        # now check if we match one of the registry values.
        for test in proxyOverride:
            if test == '<local>':
                # '<local>' means bypass for any dotless host name.
                if '.' not in rawHost:
                    return 1
            # Translate the glob-style override entry into a regex.
            test = test.replace(".", r"\.")     # mask dots
            test = test.replace("*", r".*")     # change glob sequence
            test = test.replace("?", r".")      # change glob char
            for val in host:
                # print "%s <--> %s" %( test, val )
                if re.match(test, val, re.I):
                    return 1
        return 0
|
|
|
|
|
2008-01-20 08:05:43 -04:00
|
|
|
def proxy_bypass(host):
|
2016-04-25 13:17:54 -03:00
|
|
|
"""Return True, if the host should be bypassed.
|
2008-01-20 08:05:43 -04:00
|
|
|
|
2016-04-25 13:17:54 -03:00
|
|
|
Checks proxy settings gathered from the environment, if specified,
|
2008-01-20 08:05:43 -04:00
|
|
|
or the registry.
|
|
|
|
"""
|
2016-04-25 13:17:54 -03:00
|
|
|
proxies = getproxies_environment()
|
|
|
|
if proxies:
|
|
|
|
return proxy_bypass_environment(host, proxies)
|
2008-01-20 08:05:43 -04:00
|
|
|
else:
|
|
|
|
return proxy_bypass_registry(host)
|
|
|
|
|
2000-07-26 04:04:38 -03:00
|
|
|
else:
    # Neither darwin nor Windows: no system proxy store is consulted.
    # By default use environment variables
    getproxies = getproxies_environment
    proxy_bypass = proxy_bypass_environment
|
1996-03-20 11:33:11 -04:00
|
|
|
|
1994-03-22 08:05:32 -04:00
|
|
|
# Test and time quote() and unquote()
|
|
|
|
def test1():
|
1999-02-25 12:12:12 -04:00
|
|
|
s = ''
|
|
|
|
for i in range(256): s = s + chr(i)
|
|
|
|
s = s*4
|
|
|
|
t0 = time.time()
|
|
|
|
qs = quote(s)
|
|
|
|
uqs = unquote(qs)
|
|
|
|
t1 = time.time()
|
|
|
|
if uqs != s:
|
|
|
|
print 'Wrong!'
|
2004-02-12 13:35:32 -04:00
|
|
|
print repr(s)
|
|
|
|
print repr(qs)
|
|
|
|
print repr(uqs)
|
1999-02-25 12:12:12 -04:00
|
|
|
print round(t1 - t0, 3), 'sec'
|
1994-03-22 08:05:32 -04:00
|
|
|
|
|
|
|
|
1998-09-28 11:07:00 -03:00
|
|
|
def reporthook(blocknum, blocksize, totalsize):
    """Progress callback: print one status line per transferred block.

    blocknum  -- count of blocks transferred so far
    blocksize -- size of each block in bytes
    totalsize -- total size of the transfer as reported by the caller
    """
    # Report during remote transfers
    print "Block number: %d, Block size: %d, Total size: %d" % (
        blocknum, blocksize, totalsize)
|