2016-09-07 09:03:06 -03:00
|
|
|
"""Regression tests for urllib"""
|
2003-04-25 06:39:47 -03:00
|
|
|
|
2016-04-25 13:17:54 -03:00
|
|
|
import collections
|
2000-08-31 12:48:10 -03:00
|
|
|
import urllib
|
2004-06-05 10:30:56 -03:00
|
|
|
import httplib
|
2014-09-06 15:41:39 -03:00
|
|
|
import io
|
2003-04-25 06:39:47 -03:00
|
|
|
import unittest
|
|
|
|
import os
|
2011-04-14 01:54:35 -03:00
|
|
|
import sys
|
2003-04-25 06:39:47 -03:00
|
|
|
import mimetools
|
2005-08-26 05:51:34 -03:00
|
|
|
import tempfile
|
2003-04-25 06:39:47 -03:00
|
|
|
|
2012-01-10 12:09:24 -04:00
|
|
|
from test import test_support
|
|
|
|
from base64 import b64encode
|
|
|
|
|
|
|
|
|
2003-04-25 06:39:47 -03:00
|
|
|
def hexescape(char):
    """Escape char as RFC 2396 specifies.

    Returns '%' followed by the two-digit (zero-padded) uppercase hex
    value of the character's ordinal, e.g. ' ' -> '%20'.
    """
    return "%" + hex(ord(char))[2:].upper().zfill(2)
|
|
|
|
|
2011-10-31 15:44:45 -03:00
|
|
|
|
2014-09-06 15:41:39 -03:00
|
|
|
def fakehttp(fakedata):
    """Return an HTTPConnection subclass whose "socket" replays *fakedata*.

    The returned class is intended to be installed as
    ``httplib.HTTP._connection_class`` (see FakeHTTPMixin) so that
    urlopen()-based tests never touch the network: the response bytes come
    from *fakedata*, and whatever the client sends is captured on the class
    attribute ``buf`` for inspection.
    """
    class FakeSocket(io.BytesIO):
        # A BytesIO standing in for a real socket: reads yield the canned
        # response; writes are captured instead of being transmitted.

        def sendall(self, data):
            # Record the outgoing request bytes on the connection class so
            # tests can assert on what was "sent" (e.g. auth headers).
            FakeHTTPConnection.buf = data

        def makefile(self, *args, **kwds):
            # httplib calls makefile() to get a file-like object for the
            # response; the BytesIO itself already serves that role.
            return self

        def read(self, amt=None):
            # Emulate a closed socket: return no data after close() instead
            # of raising ValueError as a closed BytesIO would.
            if self.closed:
                return b""
            return io.BytesIO.read(self, amt)

        def readline(self, length=None):
            # Same closed-socket emulation as read().
            if self.closed:
                return b""
            return io.BytesIO.readline(self, length)

    class FakeHTTPConnection(httplib.HTTPConnection):

        # buffer to store data for verification in urlopen tests.
        buf = ""

        def connect(self):
            # Instead of opening a real socket, wire up the fake one and
            # expose it on the class so tests can reach it afterwards.
            self.sock = FakeSocket(self.fakedata)
            self.__class__.fakesock = self.sock
    # Stash the canned response on the class; connect() reads it per instance.
    FakeHTTPConnection.fakedata = fakedata

    return FakeHTTPConnection
|
2012-01-10 12:09:24 -04:00
|
|
|
|
2014-09-06 15:41:39 -03:00
|
|
|
|
|
|
|
class FakeHTTPMixin(object):
    """TestCase mixin that swaps httplib's connection class for a fake one.

    fakehttp()/unfakehttp() must be used in matched pairs (typically
    try/finally) because they mutate global state on httplib.HTTP.
    """
    def fakehttp(self, fakedata):
        # Guard against nested/unbalanced patching: the connection class
        # must still be the real one before we replace it.
        assert httplib.HTTP._connection_class == httplib.HTTPConnection

        # All subsequent urlopen("http:...") calls replay *fakedata*.
        httplib.HTTP._connection_class = fakehttp(fakedata)

    def unfakehttp(self):
        # Restore the genuine connection class.
        httplib.HTTP._connection_class = httplib.HTTPConnection
|
|
|
|
|
|
|
|
|
2003-04-25 06:39:47 -03:00
|
|
|
class urlopen_FileTests(unittest.TestCase):
    """Test urlopen() opening a temporary file.

    Try to test as much functionality as possible so as to cut down on reliance
    on connecting to the Net for testing.

    """

    def setUp(self):
        """Setup of a temp file to use for testing"""
        self.text = "test_urllib: %s\n" % self.__class__.__name__
        FILE = file(test_support.TESTFN, 'wb')
        try:
            FILE.write(self.text)
        finally:
            FILE.close()
        self.pathname = test_support.TESTFN
        # Open the just-written file through a file: URL; the returned
        # object is what the individual tests exercise.
        self.returned_obj = urllib.urlopen("file:%s" % self.pathname)

    def tearDown(self):
        """Shut down the open object"""
        self.returned_obj.close()
        os.remove(test_support.TESTFN)

    def test_interface(self):
        # Make sure object returned by urlopen() has the specified methods
        for attr in ("read", "readline", "readlines", "fileno",
                     "close", "info", "geturl", "getcode", "__iter__"):
            self.assertTrue(hasattr(self.returned_obj, attr),
                         "object returned by urlopen() lacks %s attribute" %
                         attr)

    def test_read(self):
        # read() must return exactly the bytes written in setUp().
        self.assertEqual(self.text, self.returned_obj.read())

    def test_readline(self):
        # First readline() returns the single line; the next returns ''.
        self.assertEqual(self.text, self.returned_obj.readline())
        self.assertEqual('', self.returned_obj.readline(),
                         "calling readline() after exhausting the file did not"
                         " return an empty string")

    def test_readlines(self):
        lines_list = self.returned_obj.readlines()
        self.assertEqual(len(lines_list), 1,
                         "readlines() returned the wrong number of lines")
        self.assertEqual(lines_list[0], self.text,
                         "readlines() returned improper text")

    def test_fileno(self):
        # fileno() should expose a real OS-level descriptor readable
        # through os.read().
        file_num = self.returned_obj.fileno()
        self.assertIsInstance(file_num, int, "fileno() did not return an int")
        self.assertEqual(os.read(file_num, len(self.text)), self.text,
                         "Reading on the file descriptor returned by fileno() "
                         "did not return the expected text")

    def test_close(self):
        # Test close() by calling it here and then having it be called again
        # by the tearDown() method for the test
        self.returned_obj.close()

    def test_info(self):
        # info() on a file: URL returns a mimetools.Message of synthetic
        # headers.
        self.assertIsInstance(self.returned_obj.info(), mimetools.Message)

    def test_geturl(self):
        self.assertEqual(self.returned_obj.geturl(), self.pathname)

    def test_getcode(self):
        # Local files carry no HTTP status, so getcode() is None.
        self.assertEqual(self.returned_obj.getcode(), None)

    def test_iter(self):
        # Test iterator
        # Don't need to count number of iterations since test would fail the
        # instant it returned anything beyond the first line from the
        # comparison
        for line in self.returned_obj.__iter__():
            self.assertEqual(line, self.text)

    def test_relativelocalfile(self):
        # A relative path is not a valid file: URL and must be rejected.
        self.assertRaises(ValueError,urllib.urlopen,'./' + self.pathname)
|
|
|
|
|
2008-09-21 18:27:51 -03:00
|
|
|
class ProxyTests(unittest.TestCase):
    """Tests for proxy discovery/bypass helpers driven by environment vars."""

    def setUp(self):
        # Records changes to env vars
        self.env = test_support.EnvironmentVarGuard()
        # Delete all proxy related env vars
        for k in os.environ.keys():
            if 'proxy' in k.lower():
                self.env.unset(k)

    def tearDown(self):
        # Restore all proxy related env vars
        self.env.__exit__()
        del self.env

    def test_getproxies_environment_keep_no_proxies(self):
        self.env.set('NO_PROXY', 'localhost')
        proxies = urllib.getproxies_environment()
        # getproxies_environment use lowered case truncated (no '_proxy') keys
        self.assertEqual('localhost', proxies['no'])
        # List of no_proxies with space.
        self.env.set('NO_PROXY', 'localhost, anotherdomain.com, newdomain.com:1234')
        self.assertTrue(urllib.proxy_bypass_environment('anotherdomain.com'))
        # Host listed without a port bypasses for any port...
        self.assertTrue(urllib.proxy_bypass_environment('anotherdomain.com:8888'))
        # ...and a host:port entry matches that exact pair.
        self.assertTrue(urllib.proxy_bypass_environment('newdomain.com:1234'))

    def test_proxy_cgi_ignore(self):
        # When REQUEST_METHOD is set (CGI context), HTTP_PROXY must be
        # ignored to avoid the "httpoxy" request-header confusion.
        try:
            self.env.set('HTTP_PROXY', 'http://somewhere:3128')
            proxies = urllib.getproxies_environment()
            self.assertEqual('http://somewhere:3128', proxies['http'])
            self.env.set('REQUEST_METHOD', 'GET')
            proxies = urllib.getproxies_environment()
            self.assertNotIn('http', proxies)
        finally:
            self.env.unset('REQUEST_METHOD')
            self.env.unset('HTTP_PROXY')

    def test_proxy_bypass_environment_host_match(self):
        # Matching is case-insensitive, honors host:port pairs, and a
        # leading-dot entry matches subdomains.
        bypass = urllib.proxy_bypass_environment
        self.env.set('NO_PROXY',
                     'localhost, anotherdomain.com, newdomain.com:1234, .d.o.t')
        self.assertTrue(bypass('localhost'))
        self.assertTrue(bypass('LocalHost'))                 # MixedCase
        self.assertTrue(bypass('LOCALHOST'))                 # UPPERCASE
        self.assertTrue(bypass('newdomain.com:1234'))
        self.assertTrue(bypass('foo.d.o.t'))                 # issue 29142
        self.assertTrue(bypass('anotherdomain.com:8888'))
        self.assertTrue(bypass('www.newdomain.com:1234'))
        self.assertFalse(bypass('prelocalhost'))
        self.assertFalse(bypass('newdomain.com'))            # no port
        self.assertFalse(bypass('newdomain.com:1235'))       # wrong port
|
2016-04-25 13:17:54 -03:00
|
|
|
|
|
|
|
class ProxyTests_withOrderedEnv(unittest.TestCase):
    """Proxy env-var tests where the *order* of variables matters.

    os.environ is replaced with an OrderedDict so the tests can control
    which of two same-named (different-case) variables is seen first.
    """

    def setUp(self):
        # We need to test conditions, where variable order _is_ significant
        self._saved_env = os.environ
        # Monkey patch os.environ, start with empty fake environment
        os.environ = collections.OrderedDict()

    def tearDown(self):
        # Put the real environment mapping back.
        os.environ = self._saved_env

    def test_getproxies_environment_prefer_lowercase(self):
        # Test lowercase preference with removal
        os.environ['no_proxy'] = ''
        os.environ['No_Proxy'] = 'localhost'
        self.assertFalse(urllib.proxy_bypass_environment('localhost'))
        self.assertFalse(urllib.proxy_bypass_environment('arbitrary'))
        os.environ['http_proxy'] = ''
        os.environ['HTTP_PROXY'] = 'http://somewhere:3128'
        proxies = urllib.getproxies_environment()
        self.assertEqual({}, proxies)
        # Test lowercase preference of proxy bypass and correct matching including ports
        os.environ['no_proxy'] = 'localhost, noproxy.com, my.proxy:1234'
        os.environ['No_Proxy'] = 'xyz.com'
        self.assertTrue(urllib.proxy_bypass_environment('localhost'))
        self.assertTrue(urllib.proxy_bypass_environment('noproxy.com:5678'))
        self.assertTrue(urllib.proxy_bypass_environment('my.proxy:1234'))
        self.assertFalse(urllib.proxy_bypass_environment('my.proxy'))
        self.assertFalse(urllib.proxy_bypass_environment('arbitrary'))
        # Test lowercase preference with replacement
        os.environ['http_proxy'] = 'http://somewhere:3128'
        os.environ['Http_Proxy'] = 'http://somewhereelse:3128'
        proxies = urllib.getproxies_environment()
        self.assertEqual('http://somewhere:3128', proxies['http'])
|
2008-09-21 18:27:51 -03:00
|
|
|
|
|
|
|
|
2011-10-31 15:44:45 -03:00
|
|
|
class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin):
    """Test urlopen() opening a fake http connection."""

    def test_read(self):
        # Bare body with no status line: readline() replays it verbatim.
        self.fakehttp('Hello!')
        try:
            fp = urllib.urlopen("http://python.org/")
            self.assertEqual(fp.readline(), 'Hello!')
            self.assertEqual(fp.readline(), '')
            self.assertEqual(fp.geturl(), 'http://python.org/')
            self.assertEqual(fp.getcode(), 200)
        finally:
            self.unfakehttp()

    def test_url_fragment(self):
        # Issue #11703: geturl() omits fragments in the original URL.
        url = 'http://docs.python.org/library/urllib.html#OK'
        self.fakehttp('Hello!')
        try:
            fp = urllib.urlopen(url)
            self.assertEqual(fp.geturl(), url)
        finally:
            self.unfakehttp()

    def test_url_with_control_char_rejected(self):
        # Every C0 control char and every byte >= 0x7F must be quoted away
        # before it reaches the request line.
        for char_no in range(0, 0x21) + range(0x7f, 0x100):
            char = chr(char_no)
            schemeless_url = "//localhost:7777/test%s/" % char
            self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
            try:
                # urllib quotes the URL so there is no injection.
                resp = urllib.urlopen("http:" + schemeless_url)
                self.assertNotIn(char, resp.geturl())
            finally:
                self.unfakehttp()

    def test_url_with_newline_header_injection_rejected(self):
        # CRLF sequences embedded in the host part must not survive into
        # the request (header-injection / request-splitting defense).
        self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
        host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123"
        schemeless_url = "//" + host + ":8080/test/?test=a"
        try:
            # urllib quotes the URL so there is no injection.
            resp = urllib.urlopen("http:" + schemeless_url)
            self.assertNotIn(' ', resp.geturl())
            self.assertNotIn('\r', resp.geturl())
            self.assertNotIn('\n', resp.geturl())
        finally:
            self.unfakehttp()

    def test_read_bogus(self):
        # urlopen() should raise IOError for many error codes.
        self.fakehttp('''HTTP/1.1 401 Authentication Required
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
Connection: close
Content-Type: text/html; charset=iso-8859-1
''')
        try:
            self.assertRaises(IOError, urllib.urlopen, "http://python.org/")
        finally:
            self.unfakehttp()

    def test_invalid_redirect(self):
        # urlopen() should raise IOError for many error codes.
        # A 302 pointing at a file: URL must be refused (would allow a
        # remote server to read local files).
        self.fakehttp("""HTTP/1.1 302 Found
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
Location: file:README
Connection: close
Content-Type: text/html; charset=iso-8859-1
""")
        try:
            msg = "Redirection to url 'file:"
            with self.assertRaisesRegexp(IOError, msg):
                urllib.urlopen("http://python.org/")
        finally:
            self.unfakehttp()

    def test_redirect_limit_independent(self):
        # Ticket #12923: make sure independent requests each use their
        # own retry limit.
        for i in range(urllib.FancyURLopener().maxtries):
            self.fakehttp(b'''HTTP/1.1 302 Found
Location: file://guidocomputer.athome.com:/python/license
Connection: close
''')
            try:
                self.assertRaises(IOError, urllib.urlopen,
                                  "http://something")
            finally:
                self.unfakehttp()

    def test_empty_socket(self):
        # urlopen() raises IOError if the underlying socket does not send any
        # data. (#1680230)
        self.fakehttp('')
        try:
            self.assertRaises(IOError, urllib.urlopen, 'http://something')
        finally:
            self.unfakehttp()

    def test_missing_localfile(self):
        # file: URLs for nonexistent paths raise IOError, and a file
        # removed between opens raises again on the second open.
        self.assertRaises(IOError, urllib.urlopen,
                          'file://localhost/a/missing/file.py')
        fd, tmp_file = tempfile.mkstemp()
        tmp_fileurl = 'file://localhost/' + tmp_file.replace(os.path.sep, '/')
        self.assertTrue(os.path.exists(tmp_file))
        try:
            fp = urllib.urlopen(tmp_fileurl)
            fp.close()
        finally:
            os.close(fd)
            os.unlink(tmp_file)

        self.assertFalse(os.path.exists(tmp_file))
        self.assertRaises(IOError, urllib.urlopen, tmp_fileurl)

    def test_ftp_nonexisting(self):
        self.assertRaises(IOError, urllib.urlopen,
                          'ftp://localhost/not/existing/file.py')

    def test_userpass_inurl(self):
        # Credentials embedded in the URL become a Basic auth header.
        self.fakehttp('Hello!')
        try:
            fakehttp_wrapper = httplib.HTTP._connection_class
            fp = urllib.urlopen("http://user:pass@python.org/")
            authorization = ("Authorization: Basic %s\r\n" %
                             b64encode('user:pass'))
            # The authorization header must be in place
            self.assertIn(authorization, fakehttp_wrapper.buf)
            self.assertEqual(fp.readline(), "Hello!")
            self.assertEqual(fp.readline(), "")
            self.assertEqual(fp.geturl(), 'http://user:pass@python.org/')
            self.assertEqual(fp.getcode(), 200)
        finally:
            self.unfakehttp()

    def test_userpass_with_spaces_inurl(self):
        # Spaces in credentials: the header carries them raw, but the URL
        # itself is quoted, so geturl() differs from the input.
        self.fakehttp('Hello!')
        try:
            url = "http://a b:c d@python.org/"
            fakehttp_wrapper = httplib.HTTP._connection_class
            authorization = ("Authorization: Basic %s\r\n" %
                             b64encode('a b:c d'))
            fp = urllib.urlopen(url)
            # The authorization header must be in place
            self.assertIn(authorization, fakehttp_wrapper.buf)
            self.assertEqual(fp.readline(), "Hello!")
            self.assertEqual(fp.readline(), "")
            # the spaces are quoted in URL so no match
            self.assertNotEqual(fp.geturl(), url)
            self.assertEqual(fp.getcode(), 200)
        finally:
            self.unfakehttp()
|
|
|
|
|
|
|
|
|
2003-04-29 02:08:06 -03:00
|
|
|
class urlretrieve_FileTests(unittest.TestCase):
    """Test urllib.urlretrieve() on local files"""

    def setUp(self):
        # Create a list of temporary files. Each item in the list is a file
        # name (absolute path or relative to the current working directory).
        # All files in this list will be deleted in the tearDown method. Note,
        # this only helps to makes sure temporary files get deleted, but it
        # does nothing about trying to close files that may still be open. It
        # is the responsibility of the developer to properly close files even
        # when exceptional conditions occur.
        self.tempFiles = []

        # Create a temporary file.
        self.registerFileForCleanUp(test_support.TESTFN)
        self.text = 'testing urllib.urlretrieve'
        try:
            FILE = file(test_support.TESTFN, 'wb')
            FILE.write(self.text)
            FILE.close()
        finally:
            try: FILE.close()
            except: pass

    def tearDown(self):
        # Delete the temporary files.
        for each in self.tempFiles:
            try: os.remove(each)
            except: pass

    def constructLocalFileUrl(self, filePath):
        # Turn an (absolute or relative) path into a file:// URL.
        return "file://%s" % urllib.pathname2url(os.path.abspath(filePath))

    def createNewTempFile(self, data=""):
        """Creates a new temporary file containing the specified data,
        registers the file for deletion during the test fixture tear down, and
        returns the absolute path of the file."""

        newFd, newFilePath = tempfile.mkstemp()
        try:
            self.registerFileForCleanUp(newFilePath)
            newFile = os.fdopen(newFd, "wb")
            newFile.write(data)
            newFile.close()
        finally:
            try: newFile.close()
            except: pass
        return newFilePath

    def registerFileForCleanUp(self, fileName):
        # Queue *fileName* for removal in tearDown().
        self.tempFiles.append(fileName)

    def test_basic(self):
        # Make sure that a local file just gets its own location returned and
        # a headers value is returned.
        result = urllib.urlretrieve("file:%s" % test_support.TESTFN)
        self.assertEqual(result[0], test_support.TESTFN)
        self.assertIsInstance(result[1], mimetools.Message,
                              "did not get a mimetools.Message instance as "
                              "second returned value")

    def test_copy(self):
        # Test that setting the filename argument works.
        second_temp = "%s.2" % test_support.TESTFN
        self.registerFileForCleanUp(second_temp)
        result = urllib.urlretrieve(self.constructLocalFileUrl(
            test_support.TESTFN), second_temp)
        self.assertEqual(second_temp, result[0])
        self.assertTrue(os.path.exists(second_temp), "copy of the file was not "
                                                  "made")
        FILE = file(second_temp, 'rb')
        try:
            text = FILE.read()
            FILE.close()
        finally:
            try: FILE.close()
            except: pass
        self.assertEqual(self.text, text)

    def test_reporthook(self):
        # Make sure that the reporthook works.
        def hooktester(count, block_size, total_size, count_holder=[0]):
            # The hook must receive ints and be invoked with a strictly
            # increasing call count (tracked via the mutable default).
            self.assertIsInstance(count, int)
            self.assertIsInstance(block_size, int)
            self.assertIsInstance(total_size, int)
            self.assertEqual(count, count_holder[0])
            count_holder[0] = count_holder[0] + 1
        second_temp = "%s.2" % test_support.TESTFN
        self.registerFileForCleanUp(second_temp)
        urllib.urlretrieve(self.constructLocalFileUrl(test_support.TESTFN),
            second_temp, hooktester)

    def test_reporthook_0_bytes(self):
        # Test on zero length file. Should call reporthook only 1 time.
        report = []
        def hooktester(count, block_size, total_size, _report=report):
            _report.append((count, block_size, total_size))
        srcFileName = self.createNewTempFile()
        urllib.urlretrieve(self.constructLocalFileUrl(srcFileName),
            test_support.TESTFN, hooktester)
        self.assertEqual(len(report), 1)
        self.assertEqual(report[0][2], 0)

    def test_reporthook_5_bytes(self):
        # Test on 5 byte file. Should call reporthook only 2 times (once when
        # the "network connection" is established and once when the block is
        # read). Since the block size is 8192 bytes, only one block read is
        # required to read the entire file.
        report = []
        def hooktester(count, block_size, total_size, _report=report):
            _report.append((count, block_size, total_size))
        srcFileName = self.createNewTempFile("x" * 5)
        urllib.urlretrieve(self.constructLocalFileUrl(srcFileName),
            test_support.TESTFN, hooktester)
        self.assertEqual(len(report), 2)
        self.assertEqual(report[0][1], 8192)
        self.assertEqual(report[0][2], 5)

    def test_reporthook_8193_bytes(self):
        # Test on 8193 byte file. Should call reporthook only 3 times (once
        # when the "network connection" is established, once for the next 8192
        # bytes, and once for the last byte).
        report = []
        def hooktester(count, block_size, total_size, _report=report):
            _report.append((count, block_size, total_size))
        srcFileName = self.createNewTempFile("x" * 8193)
        urllib.urlretrieve(self.constructLocalFileUrl(srcFileName),
            test_support.TESTFN, hooktester)
        self.assertEqual(len(report), 3)
        self.assertEqual(report[0][1], 8192)
        self.assertEqual(report[0][2], 8193)
|
2001-01-28 17:12:22 -04:00
|
|
|
|
2011-10-31 15:44:45 -03:00
|
|
|
|
|
|
|
class urlretrieve_HttpTests(unittest.TestCase, FakeHTTPMixin):
    """Test urllib.urlretrieve() using fake http connections"""

    def test_short_content_raises_ContentTooShortError(self):
        # Content-Length claims 100 bytes but the body is only "FF":
        # urlretrieve() must notice the mismatch and raise.
        self.fakehttp('''HTTP/1.1 200 OK
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
Connection: close
Content-Length: 100
Content-Type: text/html; charset=iso-8859-1

FF
''')

        def _reporthook(par1, par2, par3):
            # No-op hook: the error must be raised even when a reporthook
            # is supplied.
            pass

        try:
            self.assertRaises(urllib.ContentTooShortError, urllib.urlretrieve,
                    'http://example.com', reporthook=_reporthook)
        finally:
            self.unfakehttp()

    def test_short_content_raises_ContentTooShortError_without_reporthook(self):
        # Same truncated-body scenario, without a reporthook.
        self.fakehttp('''HTTP/1.1 200 OK
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
Connection: close
Content-Length: 100
Content-Type: text/html; charset=iso-8859-1

FF
''')
        try:
            self.assertRaises(urllib.ContentTooShortError, urllib.urlretrieve, 'http://example.com/')
        finally:
            self.unfakehttp()
|
|
|
|
|
2003-04-25 06:39:47 -03:00
|
|
|
class QuotingTests(unittest.TestCase):
    """Tests for urllib.quote() and urllib.quote_plus()

    According to RFC 2396 ("Uniform Resource Identifiers), to escape a
    character you write it as '%' + <2 character US-ASCII hex value>. The Python
    code of ``'%' + hex(ord(<character>))[2:]`` escapes a character properly.
    Case does not matter on the hex letters.

    The various character sets specified are:

    Reserved characters : ";/?:@&=+$,"
        Have special meaning in URIs and must be escaped if not being used for
        their special meaning
    Data characters : letters, digits, and "-_.!~*'()"
        Unreserved and do not need to be escaped; can be, though, if desired
    Control characters : 0x00 - 0x1F, 0x7F
        Have no use in URIs so must be escaped
    space : 0x20
        Must be escaped
    Delimiters : '<>#%"'
        Must be escaped
    Unwise : "{}|\^[]`"
        Must be escaped

    """

    def test_never_quote(self):
        # Make sure quote() does not quote letters, digits, and "_,.-"
        do_not_quote = '' .join(["ABCDEFGHIJKLMNOPQRSTUVWXYZ",
                                 "abcdefghijklmnopqrstuvwxyz",
                                 "0123456789",
                                 "_.-"])
        result = urllib.quote(do_not_quote)
        self.assertEqual(do_not_quote, result,
                         "using quote(): %s != %s" % (do_not_quote, result))
        result = urllib.quote_plus(do_not_quote)
        self.assertEqual(do_not_quote, result,
                         "using quote_plus(): %s != %s" % (do_not_quote, result))

    def test_default_safe(self):
        # Test '/' is default value for 'safe' parameter
        self.assertEqual(urllib.quote.func_defaults[0], '/')

    def test_safe(self):
        # Test setting 'safe' parameter does what it should do
        quote_by_default = "<>"
        result = urllib.quote(quote_by_default, safe=quote_by_default)
        self.assertEqual(quote_by_default, result,
                         "using quote(): %s != %s" % (quote_by_default, result))
        result = urllib.quote_plus(quote_by_default, safe=quote_by_default)
        self.assertEqual(quote_by_default, result,
                         "using quote_plus(): %s != %s" %
                         (quote_by_default, result))

    def test_default_quoting(self):
        # Make sure all characters that should be quoted are by default sans
        # space (separate test for that).
        should_quote = [chr(num) for num in range(32)] # For 0x00 - 0x1F
        should_quote.append('<>#%"{}|\^[]`')
        should_quote.append(chr(127)) # For 0x7F
        should_quote = ''.join(should_quote)
        for char in should_quote:
            result = urllib.quote(char)
            self.assertEqual(hexescape(char), result,
                             "using quote(): %s should be escaped to %s, not %s" %
                             (char, hexescape(char), result))
            result = urllib.quote_plus(char)
            self.assertEqual(hexescape(char), result,
                             "using quote_plus(): "
                             "%s should be escapes to %s, not %s" %
                             (char, hexescape(char), result))
        del should_quote
        # Mixed safe/unsafe input: only the unsafe chars are escaped.
        partial_quote = "ab[]cd"
        expected = "ab%5B%5Dcd"
        result = urllib.quote(partial_quote)
        self.assertEqual(expected, result,
                         "using quote(): %s != %s" % (expected, result))
        result = urllib.quote_plus(partial_quote)
        self.assertEqual(expected, result,
                         "using quote_plus(): %s != %s" % (expected, result))
        # None is not a string and must be rejected.
        self.assertRaises(TypeError, urllib.quote, None)

    def test_quoting_space(self):
        # Make sure quote() and quote_plus() handle spaces as specified in
        # their unique way
        result = urllib.quote(' ')
        self.assertEqual(result, hexescape(' '),
                         "using quote(): %s != %s" % (result, hexescape(' ')))
        result = urllib.quote_plus(' ')
        self.assertEqual(result, '+',
                         "using quote_plus(): %s != +" % result)
        given = "a b cd e f"
        expect = given.replace(' ', hexescape(' '))
        result = urllib.quote(given)
        self.assertEqual(expect, result,
                         "using quote(): %s != %s" % (expect, result))
        expect = given.replace(' ', '+')
        result = urllib.quote_plus(given)
        self.assertEqual(expect, result,
                         "using quote_plus(): %s != %s" % (expect, result))

    def test_quoting_plus(self):
        # quote_plus() must escape literal '+' unless it is declared safe.
        self.assertEqual(urllib.quote_plus('alpha+beta gamma'),
                         'alpha%2Bbeta+gamma')
        self.assertEqual(urllib.quote_plus('alpha+beta gamma', '+'),
                         'alpha+beta+gamma')
|
|
|
|
|
2003-04-25 06:39:47 -03:00
|
|
|
class UnquotingTests(unittest.TestCase):
    """Tests for unquote() and unquote_plus()

    See the doc string for quoting_Tests for details on quoting and such.

    """

    def test_unquoting(self):
        # Make sure unquoting of all ASCII values works
        escape_list = []
        for num in range(128):
            given = hexescape(chr(num))
            expect = chr(num)
            result = urllib.unquote(given)
            self.assertEqual(expect, result,
                             "using unquote(): %s != %s" % (expect, result))
            result = urllib.unquote_plus(given)
            self.assertEqual(expect, result,
                             "using unquote_plus(): %s != %s" %
                             (expect, result))
            escape_list.append(given)
        # Unquote the full concatenation; the only '%' left should be the
        # one produced by unescaping '%25' itself.
        escape_string = ''.join(escape_list)
        del escape_list
        result = urllib.unquote(escape_string)
        self.assertEqual(result.count('%'), 1,
                         "using quote(): not all characters escaped; %s" %
                         result)
        result = urllib.unquote(escape_string)
        self.assertEqual(result.count('%'), 1,
                         "using unquote(): not all characters escaped: "
                         "%s" % result)

    def test_unquoting_badpercent(self):
        # Test unquoting on bad percent-escapes
        # Malformed escapes are passed through unchanged, never raising.
        given = '%xab'
        expect = given
        result = urllib.unquote(given)
        self.assertEqual(expect, result, "using unquote(): %r != %r"
                         % (expect, result))
        given = '%x'
        expect = given
        result = urllib.unquote(given)
        self.assertEqual(expect, result, "using unquote(): %r != %r"
                         % (expect, result))
        given = '%'
        expect = given
        result = urllib.unquote(given)
        self.assertEqual(expect, result, "using unquote(): %r != %r"
                         % (expect, result))

    def test_unquoting_mixed_case(self):
        # Test unquoting on mixed-case hex digits in the percent-escapes
        given = '%Ab%eA'
        expect = '\xab\xea'
        result = urllib.unquote(given)
        self.assertEqual(expect, result, "using unquote(): %r != %r"
                         % (expect, result))

    def test_unquoting_parts(self):
        # Make sure unquoting works when have non-quoted characters
        # interspersed
        given = 'ab%sd' % hexescape('c')
        expect = "abcd"
        result = urllib.unquote(given)
        self.assertEqual(expect, result,
                         "using quote(): %s != %s" % (expect, result))
        result = urllib.unquote_plus(given)
        self.assertEqual(expect, result,
                         "using unquote_plus(): %s != %s" % (expect, result))

    def test_unquoting_plus(self):
        # Test difference between unquote() and unquote_plus()
        # Only unquote_plus() translates '+' back to a space.
        given = "are+there+spaces..."
        expect = given
        result = urllib.unquote(given)
        self.assertEqual(expect, result,
                         "using unquote(): %s != %s" % (expect, result))
        expect = given.replace('+', ' ')
        result = urllib.unquote_plus(given)
        self.assertEqual(expect, result,
                         "using unquote_plus(): %s != %s" % (expect, result))

    def test_unquote_with_unicode(self):
        # A unicode input yields a unicode result; the percent-escaped
        # UTF-8 bytes are NOT decoded, just mapped to U+00C3/U+00BC etc.
        r = urllib.unquote(u'br%C3%BCckner_sapporo_20050930.doc')
        self.assertEqual(r, u'br\xc3\xbcckner_sapporo_20050930.doc')
|
|
|
|
2003-04-25 06:39:47 -03:00
|
|
|
class urlencode_Tests(unittest.TestCase):

    """Tests for urlencode()"""

    def help_inputtype(self, given, test_type):
        """Helper method for testing different input types.

        'given' must lead to only the pairs:
            * 1st, 1
            * 2nd, 2
            * 3rd, 3

        Test cannot assume anything about order.  Docs make no guarantee and
        have possible dictionary input.

        """
        result = urllib.urlencode(given)
        # Every pair must appear somewhere in the encoded output.
        for expected in ("1st=1", "2nd=2", "3rd=3"):
            self.assertIn(expected, result,
                          "testing %s: %s not found in %s" %
                          (test_type, expected, result))
        # Three pairs means exactly two separators.
        self.assertEqual(result.count('&'), 2,
                         "testing %s: expected 2 '&'s; got %s" %
                         (test_type, result.count('&')))
        first_amp = result.index('&')
        left_char = result[first_amp - 1]
        right_char = result[first_amp + 1]
        # An '&' must sit between a value digit and a key digit.
        self.assertTrue(left_char.isdigit() and right_char.isdigit(),
                        "testing %s: '&' not located in proper place in %s" %
                        (type_type, result) if False else
                        "testing %s: '&' not located in proper place in %s" %
                        (test_type, result))
        expected_len = (5 * 3) + 2  # five chars per pair plus two '&'s
        self.assertEqual(len(result), expected_len,
                         "testing %s: "
                         "unexpected number of characters: %s != %s" %
                         (test_type, len(result), expected_len))

    def test_using_mapping(self):
        # A mapping object is accepted directly as input.
        self.help_inputtype({"1st": '1', "2nd": '2', "3rd": '3'},
                            "using dict as input type")

    def test_using_sequence(self):
        # So is a sequence of two-item sequences.
        self.help_inputtype([('1st', '1'), ('2nd', '2'), ('3rd', '3')],
                            "using sequence of two-item tuples as input")

    def test_quoting(self):
        # Keys and values must be escaped with quote_plus().
        cases = [
            ({"&": "="}, "%s=%s" % (hexescape('&'), hexescape('='))),
            ({"key name": "A bunch of pluses"},
             "key+name=A+bunch+of+pluses"),
        ]
        for given, expect in cases:
            self.assertEqual(expect, urllib.urlencode(given))

    def test_doseq(self):
        # Passing True for 'doseq' expands a sequence value into one
        # key=value pair per element.
        given = {'sequence': ['1', '2', '3']}
        expect = "sequence=%s" % urllib.quote_plus(str(['1', '2', '3']))
        self.assertEqual(expect, urllib.urlencode(given))
        result = urllib.urlencode(given, True)
        for value in given["sequence"]:
            self.assertIn("sequence=%s" % value, result)
        self.assertEqual(result.count('&'), 2,
                         "Expected 2 '&'s, got %s" % result.count('&'))
|
|
|
|
|
|
class Pathname_Tests(unittest.TestCase):

    """Test pathname2url() and url2pathname()"""

    def test_basic(self):
        # Make sure simple round-trips between OS paths and URLs pass.
        expected_path = os.path.join("parts", "of", "a", "path")
        expected_url = "parts/of/a/path"
        result = urllib.pathname2url(expected_path)
        self.assertEqual(expected_url, result,
                         "pathname2url() failed; %s != %s" %
                         (result, expected_url))
        result = urllib.url2pathname(expected_url)
        # NOTE: the failure message previously misspelled the function
        # name as "url2pathame()".
        self.assertEqual(expected_path, result,
                         "url2pathname() failed; %s != %s" %
                         (result, expected_path))

    def test_quoting(self):
        # Test automatic quoting and unquoting works for pathname2url() and
        # url2pathname() respectively
        given = os.path.join("needs", "quot=ing", "here")
        expect = "needs/%s/here" % urllib.quote("quot=ing")
        result = urllib.pathname2url(given)
        self.assertEqual(expect, result,
                         "pathname2url() failed; %s != %s" %
                         (expect, result))
        expect = given
        result = urllib.url2pathname(result)
        self.assertEqual(expect, result,
                         "url2pathname() failed; %s != %s" %
                         (expect, result))
        given = os.path.join("make sure", "using_quote")
        expect = "%s/using_quote" % urllib.quote("make sure")
        result = urllib.pathname2url(given)
        self.assertEqual(expect, result,
                         "pathname2url() failed; %s != %s" %
                         (expect, result))
        given = "make+sure/using_unquote"
        expect = os.path.join("make+sure", "using_unquote")
        result = urllib.url2pathname(given)
        self.assertEqual(expect, result,
                         "url2pathname() failed; %s != %s" %
                         (expect, result))

    @unittest.skipUnless(sys.platform == 'win32',
                         'test specific to the nturl2path library')
    def test_ntpath(self):
        # Drive-letter URLs ('|' is the historical escape for ':') must
        # map to 'C:\' style paths on Windows.
        given = ('/C:/', '///C:/', '/C|//')
        expect = 'C:\\'
        for url in given:
            result = urllib.url2pathname(url)
            self.assertEqual(expect, result,
                             'nturl2path.url2pathname() failed; %s != %s' %
                             (expect, result))
        given = '///C|/path'
        expect = 'C:\\path'
        result = urllib.url2pathname(given)
        self.assertEqual(expect, result,
                         'nturl2path.url2pathname() failed; %s != %s' %
                         (expect, result))
2009-03-30 18:51:50 -03:00
|
|
|
class Utility_Tests(unittest.TestCase):

    """Testcase to test the various utility functions in the urllib."""
    # In Python 3 this test class is moved to test_urlparse.

    def test_splittype(self):
        # scheme:rest -> (scheme, rest); missing or empty scheme -> None.
        for url, expected in [
                ('type:opaquestring', ('type', 'opaquestring')),
                ('opaquestring', (None, 'opaquestring')),
                (':opaquestring', (None, ':opaquestring')),
                ('type:', ('type', '')),
                ('type:opaque:string', ('type', 'opaque:string')),
                ]:
            self.assertEqual(urllib.splittype(url), expected)

    def test_splithost(self):
        # //host/path -> (host, /path); no leading // -> host is None.
        for url, expected in [
                ('//www.example.org:80/foo/bar/baz.html',
                 ('www.example.org:80', '/foo/bar/baz.html')),
                ('//www.example.org:80', ('www.example.org:80', '')),
                ('/foo/bar/baz.html', (None, '/foo/bar/baz.html')),
                ]:
            self.assertEqual(urllib.splithost(url), expected)

        # bpo-30500: # starts a fragment.
        for url, expected in [
                ('//127.0.0.1#@host.com', ('127.0.0.1', '/#@host.com')),
                ('//127.0.0.1#@host.com:80', ('127.0.0.1', '/#@host.com:80')),
                ('//127.0.0.1:80#@host.com', ('127.0.0.1:80', '/#@host.com')),
                ]:
            self.assertEqual(urllib.splithost(url), expected)

        # Empty host is returned as empty string.
        self.assertEqual(urllib.splithost("///file"), ('', '/file'))

        # Trailing semicolon, question mark and hash symbol are kept.
        for url, expected in [
                ("//example.net/file;", ('example.net', '/file;')),
                ("//example.net/file?", ('example.net', '/file?')),
                ("//example.net/file#", ('example.net', '/file#')),
                ]:
            self.assertEqual(urllib.splithost(url), expected)

    def test_splituser(self):
        # Split on the LAST '@' so user/password may themselves contain one.
        for host, expected in [
                ('User:Pass@www.python.org:080',
                 ('User:Pass', 'www.python.org:080')),
                ('@www.python.org:080', ('', 'www.python.org:080')),
                ('www.python.org:080', (None, 'www.python.org:080')),
                ('User:Pass@', ('User:Pass', '')),
                ('User@example.com:Pass@www.python.org:080',
                 ('User@example.com:Pass', 'www.python.org:080')),
                ]:
            self.assertEqual(urllib.splituser(host), expected)

    def test_splitpasswd(self):
        # Some of the password examples are not sensible, but it is added to
        # confirming to RFC2617 and addressing issue4675.
        for userinfo, expected in [
                ('user:ab', ('user', 'ab')),
                ('user:a\nb', ('user', 'a\nb')),
                ('user:a\tb', ('user', 'a\tb')),
                ('user:a\rb', ('user', 'a\rb')),
                ('user:a\fb', ('user', 'a\fb')),
                ('user:a\vb', ('user', 'a\vb')),
                ('user:a:b', ('user', 'a:b')),
                ('user:a b', ('user', 'a b')),
                ('user 2:ab', ('user 2', 'ab')),
                ('user+1:a+b', ('user+1', 'a+b')),
                ('user:', ('user', '')),
                ('user', ('user', None)),
                (':ab', ('', 'ab')),
                ]:
            self.assertEqual(urllib.splitpasswd(userinfo), expected)

    def test_splitport(self):
        # Port stays a string; a missing or non-numeric port yields None.
        for host, expected in [
                ('parrot:88', ('parrot', '88')),
                ('parrot', ('parrot', None)),
                ('parrot:', ('parrot', None)),
                ('127.0.0.1', ('127.0.0.1', None)),
                ('parrot:cheese', ('parrot:cheese', None)),
                ('[::1]:88', ('[::1]', '88')),
                ('[::1]', ('[::1]', None)),
                (':88', ('', '88')),
                ]:
            self.assertEqual(urllib.splitport(host), expected)

    def test_splitnport(self):
        # Like splitport() but converts the port to int; the optional
        # second argument is the default, and a bad port yields None.
        for args, expected in [
                (('parrot:88',), ('parrot', 88)),
                (('parrot',), ('parrot', -1)),
                (('parrot', 55), ('parrot', 55)),
                (('parrot:',), ('parrot', -1)),
                (('parrot:', 55), ('parrot', 55)),
                (('127.0.0.1',), ('127.0.0.1', -1)),
                (('127.0.0.1', 55), ('127.0.0.1', 55)),
                (('parrot:cheese',), ('parrot', None)),
                (('parrot:cheese', 55), ('parrot', None)),
                ]:
            self.assertEqual(urllib.splitnport(*args), expected)

    def test_splitquery(self):
        # Normal cases are exercised by other tests; ensure that we also
        # catch cases with no port specified (testcase ensuring coverage)
        for url, expected in [
                ('http://python.org/fake?foo=bar',
                 ('http://python.org/fake', 'foo=bar')),
                ('http://python.org/fake?foo=bar?',
                 ('http://python.org/fake?foo=bar', '')),
                ('http://python.org/fake', ('http://python.org/fake', None)),
                ('?foo=bar', ('', 'foo=bar')),
                ]:
            self.assertEqual(urllib.splitquery(url), expected)

    def test_splittag(self):
        # Split on the LAST '#'; no '#' means tag is None.
        for url, expected in [
                ('http://example.com?foo=bar#baz',
                 ('http://example.com?foo=bar', 'baz')),
                ('http://example.com?foo=bar#',
                 ('http://example.com?foo=bar', '')),
                ('#baz', ('', 'baz')),
                ('http://example.com?foo=bar',
                 ('http://example.com?foo=bar', None)),
                ('http://example.com?foo=bar#baz#boo',
                 ('http://example.com?foo=bar#baz', 'boo')),
                ]:
            self.assertEqual(urllib.splittag(url), expected)

    def test_splitattr(self):
        # Attributes after ';' come back as a list; no ';' -> empty list.
        for url, expected in [
                ('/path;attr1=value1;attr2=value2',
                 ('/path', ['attr1=value1', 'attr2=value2'])),
                ('/path;', ('/path', [''])),
                (';attr1=value1;attr2=value2',
                 ('', ['attr1=value1', 'attr2=value2'])),
                ('/path', ('/path', [])),
                ]:
            self.assertEqual(urllib.splitattr(url), expected)

    def test_splitvalue(self):
        # Normal cases are exercised by other tests; test pathological cases
        # with no key/value pairs. (testcase ensuring coverage)
        for attr, expected in [
                ('foo=bar', ('foo', 'bar')),
                ('foo=', ('foo', '')),
                ('=bar', ('', 'bar')),
                ('foobar', ('foobar', None)),
                ('foo=bar=baz', ('foo', 'bar=baz')),
                ]:
            self.assertEqual(urllib.splitvalue(attr), expected)

    def test_toBytes(self):
        # ASCII-only unicode converts to a byte string; non-ASCII raises.
        self.assertEqual(urllib.toBytes(u'http://www.python.org'),
                         'http://www.python.org')
        self.assertRaises(UnicodeError, urllib.toBytes,
                          test_support.u(r'http://www.python.org/medi\u00e6val'))

    def test_unwrap(self):
        # Strip the '<URL:...>' wrapper.
        self.assertEqual(urllib.unwrap('<URL:type://host/path>'),
                         'type://host/path')
2009-03-30 18:51:50 -03:00
|
|
|
|
2009-04-21 00:24:19 -03:00
|
|
|
class URLopener_Tests(unittest.TestCase):

    """Testcase to test the open method of URLopener class."""

    def test_quoted_open(self):
        # open() dispatches on scheme, so an open_spam() method handles
        # 'spam://' URLs and receives the already-processed remainder.
        class DummyURLopener(urllib.URLopener):

            def open_spam(self, url):
                return url

        opener = DummyURLopener()
        self.assertEqual(opener.open('spam://example/ /'),
                         '//example/%20/')

        # test the safe characters are not quoted by urlopen
        self.assertEqual(
            opener.open("spam://c:|windows%/:=&?~#+!$,;'@()*[]|/path/"),
            "//c:|windows%/:=&?~#+!$,;'@()*[]|/path/")

    def test_local_file_open(self):
        # bpo-35907, CVE-2019-9948: urllib must reject local_file:// scheme
        class DummyURLopener(urllib.URLopener):

            def open_local_file(self, url):
                return url

        for url in ('local_file://example', 'local-file://example'):
            openers = (
                urllib.urlopen,
                urllib.URLopener().open,
                urllib.URLopener().retrieve,
                DummyURLopener().open,
                DummyURLopener().retrieve,
            )
            for func in openers:
                self.assertRaises(IOError, func, url)
2009-04-21 00:24:19 -03:00
|
|
|
|
2007-05-25 01:20:22 -03:00
|
|
|
# Just commented them out.
|
|
|
|
# Can't really tell why they keep failing on Windows and SPARC.
|
2011-03-16 07:34:31 -03:00
|
|
|
# Everywhere else they work ok, but on those machines, sometimes
|
2007-05-25 01:20:22 -03:00
|
|
|
# fail in one of the tests, sometimes in other. I have a linux, and
|
|
|
|
# the tests go ok.
|
2013-08-17 10:56:09 -03:00
|
|
|
# If anybody has one of the problematic environments, please help!
|
2007-05-25 01:20:22 -03:00
|
|
|
# . Facundo
|
|
|
|
#
|
|
|
|
# def server(evt):
|
2008-05-29 13:39:26 -03:00
|
|
|
# import socket, time
|
2007-05-25 01:20:22 -03:00
|
|
|
# serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
|
|
|
# serv.settimeout(3)
|
|
|
|
# serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
|
|
|
# serv.bind(("", 9093))
|
|
|
|
# serv.listen(5)
|
|
|
|
# try:
|
|
|
|
# conn, addr = serv.accept()
|
|
|
|
# conn.send("1 Hola mundo\n")
|
|
|
|
# cantdata = 0
|
|
|
|
# while cantdata < 13:
|
|
|
|
# data = conn.recv(13-cantdata)
|
|
|
|
# cantdata += len(data)
|
|
|
|
# time.sleep(.3)
|
|
|
|
# conn.send("2 No more lines\n")
|
|
|
|
# conn.close()
|
|
|
|
# except socket.timeout:
|
|
|
|
# pass
|
|
|
|
# finally:
|
|
|
|
# serv.close()
|
|
|
|
# evt.set()
|
|
|
|
#
|
|
|
|
# class FTPWrapperTests(unittest.TestCase):
|
|
|
|
#
|
|
|
|
# def setUp(self):
|
2008-05-29 13:39:26 -03:00
|
|
|
# import ftplib, time, threading
|
2007-05-25 01:20:22 -03:00
|
|
|
# ftplib.FTP.port = 9093
|
|
|
|
# self.evt = threading.Event()
|
|
|
|
# threading.Thread(target=server, args=(self.evt,)).start()
|
|
|
|
# time.sleep(.1)
|
|
|
|
#
|
|
|
|
# def tearDown(self):
|
|
|
|
# self.evt.wait()
|
|
|
|
#
|
|
|
|
# def testBasic(self):
|
|
|
|
# # connects
|
|
|
|
# ftp = urllib.ftpwrapper("myuser", "mypass", "localhost", 9093, [])
|
2008-05-29 13:39:26 -03:00
|
|
|
# ftp.close()
|
2007-05-25 01:20:22 -03:00
|
|
|
#
|
2008-05-29 13:39:26 -03:00
|
|
|
# def testTimeoutNone(self):
|
|
|
|
# # global default timeout is ignored
|
|
|
|
# import socket
|
2014-02-08 08:49:55 -04:00
|
|
|
# self.assertIsNone(socket.getdefaulttimeout())
|
2008-05-29 13:39:26 -03:00
|
|
|
# socket.setdefaulttimeout(30)
|
|
|
|
# try:
|
|
|
|
# ftp = urllib.ftpwrapper("myuser", "mypass", "localhost", 9093, [])
|
|
|
|
# finally:
|
|
|
|
# socket.setdefaulttimeout(None)
|
2007-05-25 01:20:22 -03:00
|
|
|
# self.assertEqual(ftp.ftp.sock.gettimeout(), 30)
|
2008-05-29 13:39:26 -03:00
|
|
|
# ftp.close()
|
2007-05-25 01:20:22 -03:00
|
|
|
#
|
2008-05-29 13:39:26 -03:00
|
|
|
# def testTimeoutDefault(self):
|
|
|
|
# # global default timeout is used
|
|
|
|
# import socket
|
2014-02-08 08:49:55 -04:00
|
|
|
# self.assertIsNone(socket.getdefaulttimeout())
|
2007-05-25 01:20:22 -03:00
|
|
|
# socket.setdefaulttimeout(30)
|
|
|
|
# try:
|
|
|
|
# ftp = urllib.ftpwrapper("myuser", "mypass", "localhost", 9093, [])
|
|
|
|
# finally:
|
2008-05-29 13:39:26 -03:00
|
|
|
# socket.setdefaulttimeout(None)
|
2007-05-25 01:20:22 -03:00
|
|
|
# self.assertEqual(ftp.ftp.sock.gettimeout(), 30)
|
2008-05-29 13:39:26 -03:00
|
|
|
# ftp.close()
|
2007-05-25 01:20:22 -03:00
|
|
|
#
|
2008-05-29 13:39:26 -03:00
|
|
|
# def testTimeoutValue(self):
|
|
|
|
# ftp = urllib.ftpwrapper("myuser", "mypass", "localhost", 9093, [],
|
|
|
|
# timeout=30)
|
|
|
|
# self.assertEqual(ftp.ftp.sock.gettimeout(), 30)
|
|
|
|
# ftp.close()
|
2007-05-24 14:50:54 -03:00
|
|
|
|
2003-04-25 06:39:47 -03:00
|
|
|
|
|
|
|
|
|
|
|
def test_main():
    """Run every test class in this module.

    The Python 3.0 transition DeprecationWarning that urllib.urlopen
    emits under -3 is silenced for the duration of the run.
    """
    import warnings
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', ".*urllib\.urlopen.*Python 3.0",
                                DeprecationWarning)
        test_support.run_unittest(
            urlopen_FileTests,
            urlopen_HttpTests,
            urlretrieve_FileTests,
            urlretrieve_HttpTests,
            # ProxyTests was previously listed twice; run it once.
            ProxyTests,
            QuotingTests,
            UnquotingTests,
            urlencode_Tests,
            Pathname_Tests,
            Utility_Tests,
            URLopener_Tests,
            ProxyTests_withOrderedEnv,
            #FTPWrapperTests,
        )
|
2001-01-28 17:12:22 -04:00
|
|
|
|
|
|
|
|
|
|
|
|
2003-04-25 06:39:47 -03:00
|
|
|
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    test_main()
|