Patch for issue 2848, mostly by Humberto Diogenes, with a couple of small fixes by Barry.  This removes mimetools from the stdlib.
Barry Warsaw 2008-06-12 04:06:45 +00:00
parent 75f25f2c9a
commit 820c120059
23 changed files with 146 additions and 202 deletions
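
At its core the patch replaces mimetools.Message with email.message.Message everywhere headers are handled. A minimal sketch of the method mapping the diff applies throughout, using hypothetical header values (not taken from the patch):

    import email

    # hypothetical header block, just to exercise the new accessors
    msg = email.message_from_string(
        "Content-Type: text/html\n"
        "Set-Cookie: a=1\n"
        "Set-Cookie: b=2\n")

    # mimetools.Message.getheader(name)  ->  email.message.Message.get(name)
    msg.get("content-type")          # 'text/html'
    # mimetools.Message.getheaders(name) ->  get_all(name, [])
    msg.get_all("set-cookie", [])    # ['a=1', 'b=2']
    # .type / .typeheader / getsubtype() -> get_content_type() / get_content_subtype()
    msg.get_content_type()           # 'text/html'
    msg.get_content_subtype()        # 'html'
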

View File

@ -89,7 +89,7 @@ def message(f, delimiter = ''):
t = time.mktime(tt)
else:
sys.stderr.write(
'Unparseable date: %r\n' % (m.getheader('Date'),))
'Unparseable date: %r\n' % (m.get('Date'),))
t = os.fstat(f.fileno())[stat.ST_MTIME]
print('From', email, time.ctime(t))
# Copy RFC822 header

View File

@ -249,6 +249,8 @@ def parse_multipart(fp, pdict):
since it can call parse_multipart().
"""
import http.client
boundary = ""
if 'boundary' in pdict:
boundary = pdict['boundary']
@ -266,8 +268,8 @@ def parse_multipart(fp, pdict):
data = None
if terminator:
# At start of next part. Read headers first.
headers = mimetools.Message(fp)
clength = headers.getheader('content-length')
headers = http.client.parse_headers(fp)
clength = headers.get('content-length')
if clength:
try:
bytes = int(clength)
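
For cgi.parse_multipart() the part headers are now read with the parse_headers() helper this commit adds to http.client. A sketch of that call on a hypothetical multipart part, assuming a bytes stream positioned at the part's header block:

    import io
    import http.client

    # hypothetical part: headers, a blank line, then 4 bytes of body
    part = io.BytesIO(b'Content-Disposition: form-data; name="f"\r\n'
                      b'Content-Length: 4\r\n'
                      b'\r\n'
                      b'body')
    headers = http.client.parse_headers(part)   # stops after the blank line
    headers.get("content-length")               # '4'
    part.read(4)                                # b'body'
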

View File

@ -68,11 +68,7 @@ class Parser:
data = fp.read(8192)
if not data:
break
# XXX When Guido fixes TextIOWrapper.read() to act just like
# .readlines(), this...
feedparser.feed(str(data))
# ...gets reverted back to
#feedparser.feed(data)
feedparser.feed(data)
return feedparser.close()
def parsestr(self, text, headersonly=False):
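
The hunk above drops the str() workaround and feeds the chunks straight back to the FeedParser. A minimal sketch of that feed/close cycle on its own, with a made-up two-chunk message:

    from email.feedparser import FeedParser

    fp = FeedParser()
    fp.feed("Subject: hello\n")   # chunks may split anywhere in the message
    fp.feed("\nbody\n")
    msg = fp.close()
    msg["Subject"]                # 'hello'
    msg.get_payload()             # 'body\n'
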

View File

@ -25,7 +25,6 @@ import time
import base64
import random
import socket
import urllib
import warnings
from io import StringIO
@ -235,6 +234,7 @@ def decode_params(params):
params is a sequence of 2-tuples containing (param name, string value).
"""
import urllib
# Copy params so we don't mess with the original
params = params[:]
new_params = []

View File

@ -67,8 +67,9 @@ Req-sent-unread-response _CS_REQ_SENT <response_class>
"""
import io
import mimetools
import socket
import email.parser
import email.message
from urlparse import urlsplit
import warnings
@ -201,110 +202,52 @@ responses = {
# maximal amount of data to read at one time in _safe_read
MAXAMOUNT = 1048576
class HTTPMessage(mimetools.Message):
class HTTPMessage(email.message.Message):
def getallmatchingheaders(self, name):
"""Find all header lines matching a given header name.
def addheader(self, key, value):
"""Add header for field key handling repeats."""
prev = self.dict.get(key)
if prev is None:
self.dict[key] = value
else:
combined = ", ".join((prev, value))
self.dict[key] = combined
Look through the list of headers and find all lines matching a given
header name (and their continuation lines). A list of the lines is
returned, without interpretation. If the header does not occur, an
empty list is returned. If the header occurs multiple times, all
occurrences are returned. Case is not important in the header name.
def addcontinue(self, key, more):
"""Add more field data from a continuation line."""
prev = self.dict[key]
self.dict[key] = prev + "\n " + more
def readheaders(self):
"""Read header lines.
Read header lines up to the entirely blank line that terminates them.
The (normally blank) line that ends the headers is skipped, but not
included in the returned list. If a non-header line ends the headers,
(which is an error), an attempt is made to backspace over it; it is
never included in the returned list.
The variable self.status is set to the empty string if all went well,
otherwise it is an error message. The variable self.headers is a
completely uninterpreted list of lines contained in the header (so
printing them will reproduce the header exactly as it appears in the
file).
If multiple header fields with the same name occur, they are combined
according to the rules in RFC 2616 sec 4.2:
Appending each subsequent field-value to the first, each separated
by a comma. The order in which header fields with the same field-name
are received is significant to the interpretation of the combined
field value.
"""
# XXX The implementation overrides the readheaders() method of
# rfc822.Message. The base class design isn't amenable to
# customized behavior here so the method here is a copy of the
# base class code with a few small changes.
# XXX: copied from rfc822.Message for compatibility
name = name.lower() + ':'
n = len(name)
lst = []
hit = 0
for line in self.keys():
if line[:n].lower() == name:
hit = 1
elif not line[:1].isspace():
hit = 0
if hit:
lst.append(line)
return lst
self.dict = {}
self.unixfrom = ''
self.headers = hlist = []
self.status = ''
headerseen = ""
firstline = 1
startofline = unread = tell = None
if hasattr(self.fp, 'unread'):
unread = self.fp.unread
elif self.seekable:
tell = self.fp.tell
while True:
if tell:
try:
startofline = tell()
except IOError:
startofline = tell = None
self.seekable = 0
line = str(self.fp.readline(), "iso-8859-1")
if not line:
self.status = 'EOF in headers'
break
# Skip unix From name time lines
if firstline and line.startswith('From '):
self.unixfrom = self.unixfrom + line
continue
firstline = 0
if headerseen and line[0] in ' \t':
# XXX Not sure if continuation lines are handled properly
# for http and/or for repeating headers
# It's a continuation line.
hlist.append(line)
self.addcontinue(headerseen, line.strip())
continue
elif self.iscomment(line):
# It's a comment. Ignore it.
continue
elif self.islast(line):
# Note! No pushback here! The delimiter line gets eaten.
break
headerseen = self.isheader(line)
if headerseen:
# It's a legal header line, save it.
hlist.append(line)
self.addheader(headerseen, line[len(headerseen)+1:].strip())
continue
else:
# It's not a header line; throw it back and stop here.
if not self.dict:
self.status = 'No headers'
else:
self.status = 'Non-header line where header expected'
# Try to undo the read.
if unread:
unread(line)
elif tell:
self.fp.seek(startofline)
else:
self.status = self.status + '; bad seek'
break
def parse_headers(fp):
"""Parses only RFC2822 headers from a file pointer.
email Parser wants to see strings rather than bytes.
But a TextIOWrapper around self.rfile would buffer too many bytes
from the stream, bytes which we later need to read as bytes.
So we read the correct bytes here, as bytes, for email Parser
to parse.
"""
# XXX: Copied from http.server.BaseHTTPRequestHandler.parse_request,
# maybe we can just call this function from there.
headers = []
while True:
line = fp.readline()
headers.append(line)
if line in (b'\r\n', b'\n', b''):
break
hstring = b''.join(headers).decode('iso-8859-1')
return email.parser.Parser(_class=HTTPMessage).parsestr(hstring)
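
The new parse_headers() above is also what HTTPResponse uses in place of mimetools.Message(self.fp, 0); the hunks that follow then query the result with get(). A sketch with a canned (hypothetical) response header block:

    import io
    import http.client

    fp = io.BytesIO(b"Transfer-Encoding: chunked\r\n"
                    b"Connection: close\r\n"
                    b"\r\n")
    msg = http.client.parse_headers(fp)
    msg.get("transfer-encoding")     # 'chunked'
    msg.get("content-length")        # None -- missing headers come back as None
    msg.keys()                       # ['Transfer-Encoding', 'Connection']
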
class HTTPResponse:
@ -418,19 +361,17 @@ class HTTPResponse:
self.length = None
self.chunked = 0
self.will_close = 1
self.msg = HTTPMessage(io.BytesIO())
self.msg = email.message_from_string('')
return
self.msg = HTTPMessage(self.fp, 0)
self.msg = parse_headers(self.fp)
if self.debuglevel > 0:
for hdr in self.msg.headers:
for hdr in self.msg:
print("header:", hdr, end=" ")
# don't let the msg keep an fp
self.msg.fp = None
# are we using the chunked-style of transfer encoding?
tr_enc = self.msg.getheader("transfer-encoding")
tr_enc = self.msg.get("transfer-encoding")
if tr_enc and tr_enc.lower() == "chunked":
self.chunked = 1
self.chunk_left = None
@ -443,7 +384,10 @@ class HTTPResponse:
# do we have a Content-Length?
# NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
self.length = None
length = self.msg.getheader("content-length")
length = self.msg.get("content-length")
# are we using the chunked-style of transfer encoding?
tr_enc = self.msg.get("transfer-encoding")
if length and not self.chunked:
try:
self.length = int(length)
@ -470,11 +414,11 @@ class HTTPResponse:
self.will_close = 1
def _check_close(self):
conn = self.msg.getheader("connection")
conn = self.msg.get("connection")
if self.version == 11:
# An HTTP/1.1 proxy is assumed to stay open unless
# explicitly closed.
conn = self.msg.getheader("connection")
conn = self.msg.get("connection")
if conn and "close" in conn.lower():
return True
return False
@ -483,7 +427,7 @@ class HTTPResponse:
# connections, using rules different than HTTP/1.1.
# For older HTTP, Keep-Alive indicates persistent connection.
if self.msg.getheader("keep-alive"):
if self.msg.get("keep-alive"):
return False
# At least Akamai returns a "Connection: Keep-Alive" header,
@ -492,7 +436,7 @@ class HTTPResponse:
return False
# Proxy-Connection is a netscape hack.
pconn = self.msg.getheader("proxy-connection")
pconn = self.msg.get("proxy-connection")
if pconn and "keep-alive" in pconn.lower():
return False
@ -644,7 +588,7 @@ class HTTPResponse:
def getheader(self, name, default=None):
if self.msg is None:
raise ResponseNotReady()
return self.msg.getheader(name, default)
return ', '.join(self.msg.get_all(name, default))
def getheaders(self):
"""Return list of (header, value) tuples."""

View File

@ -1547,8 +1547,8 @@ class CookieJar:
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965_hdrs = headers.get_all("Set-Cookie2", [])
ns_hdrs = headers.get_all("Set-Cookie", [])
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
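
CookieJar switches from getheaders() to get_all() with an explicit [] default, so a response without cookie headers simply yields empty lists. A small sketch with a made-up response header:

    import email

    headers = email.message_from_string("Set-Cookie: sid=abc123; Path=/\n")
    headers.get_all("Set-Cookie", [])    # ['sid=abc123; Path=/']
    headers.get_all("Set-Cookie2", [])   # [] -- no RFC 2965 cookies present
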

View File

@ -95,10 +95,11 @@ import socket # For gethostbyaddr()
import shutil
import urllib
import select
import mimetools
import mimetypes
import posixpath
import socketserver
import email.message
import email.parser
# Default error message template
DEFAULT_ERROR_MESSAGE = """\
@ -211,7 +212,7 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
- command, path and version are the broken-down request line;
- headers is an instance of mimetools.Message (or a derived
- headers is an instance of email.message.Message (or a derived
class) containing the header information;
- rfile is a file object open for reading positioned at the
@ -326,7 +327,7 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
if line in (b'\r\n', b'\n', b''):
break
hfile = io.StringIO(b''.join(headers).decode('iso-8859-1'))
self.headers = self.MessageClass(hfile)
self.headers = email.parser.Parser(_class=self.MessageClass).parse(hfile)
conntype = self.headers.get('Connection', "")
if conntype.lower() == 'close':
@ -524,8 +525,9 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
# Set this to HTTP/1.1 to enable automatic keepalive
protocol_version = "HTTP/1.0"
# The Message-like class used to parse headers
MessageClass = mimetools.Message
# MessageClass used to parse headers
import http.client
MessageClass = http.client.HTTPMessage
# Table mapping response codes to messages; entries have the
# form {code: (shortmessage, longmessage)}.
@ -955,7 +957,7 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
if host != self.client_address[0]:
env['REMOTE_HOST'] = host
env['REMOTE_ADDR'] = self.client_address[0]
authorization = self.headers.getheader("authorization")
authorization = self.headers.get("authorization")
if authorization:
authorization = authorization.split()
if len(authorization) == 2:
@ -973,14 +975,14 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
if len(authorization) == 2:
env['REMOTE_USER'] = authorization[0]
# XXX REMOTE_IDENT
if self.headers.typeheader is None:
env['CONTENT_TYPE'] = self.headers.type
if self.headers.get('content-type') is None:
env['CONTENT_TYPE'] = self.headers.get_content_type()
else:
env['CONTENT_TYPE'] = self.headers.typeheader
length = self.headers.getheader('content-length')
env['CONTENT_TYPE'] = self.headers['content-type']
length = self.headers.get('content-length')
if length:
env['CONTENT_LENGTH'] = length
referer = self.headers.getheader('referer')
referer = self.headers.get('referer')
if referer:
env['HTTP_REFERER'] = referer
accept = []
@ -990,10 +992,10 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
else:
accept = accept + line[7:].split(',')
env['HTTP_ACCEPT'] = ','.join(accept)
ua = self.headers.getheader('user-agent')
ua = self.headers.get('user-agent')
if ua:
env['HTTP_USER_AGENT'] = ua
co = filter(None, self.headers.getheaders('cookie'))
co = filter(None, self.headers.get_all('cookie', []))
if co:
env['HTTP_COOKIE'] = ', '.join(co)
# XXX Other HTTP_* headers
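
In http.server the request headers are now parsed into an http.client.HTTPMessage via email.parser.Parser, and the CGI code falls back to get_content_type() when no Content-Type header was sent. A sketch of that path with hypothetical request headers:

    import io
    import email.parser
    import http.client

    raw = (b"Host: localhost\r\n"
           b"Content-Length: 5\r\n"
           b"Connection: close\r\n"
           b"\r\n")
    hfile = io.StringIO(raw.decode("iso-8859-1"))
    headers = email.parser.Parser(_class=http.client.HTTPMessage).parse(hfile)
    headers.get("content-length")     # '5'
    headers.get("content-type")       # None -- header absent
    headers.get_content_type()        # 'text/plain', the documented default
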

View File

@ -1910,8 +1910,8 @@ def serve(port, callback=None, completer=None):
def __init__(self, fp, seekable=1):
Message = self.__class__
Message.__bases__[0].__bases__[0].__init__(self, fp, seekable)
self.encodingheader = self.getheader('content-transfer-encoding')
self.typeheader = self.getheader('content-type')
self.encodingheader = self.get('content-transfer-encoding')
self.typeheader = self.get('content-type')
self.parsetype()
self.parseplist()

View File

@ -421,9 +421,9 @@ class MailmanProxy(PureProxy):
# These headers are required for the proper execution of Mailman. All
# MTAs in existance seem to add these if the original message doesn't
# have them.
if not msg.getheader('from'):
if not msg.get('from'):
msg['From'] = mailfrom
if not msg.getheader('date'):
if not msg.get('date'):
msg['Date'] = time.ctime(time.time())
for rcpt, listname, command in listnames:
print('sending message to', rcpt, file=DEBUGSTREAM)

View File

@ -193,9 +193,8 @@ class FakeResponse:
"""
headers: list of RFC822-style 'Key: value' strings
"""
import mimetools, io
f = io.StringIO("\n".join(headers))
self._headers = mimetools.Message(f)
import email
self._headers = email.message_from_string("\n".join(headers))
self._url = url
def info(self): return self._headers

View File

@ -19,12 +19,14 @@ import threading
import unittest
from test import support
class NoLogRequestHandler:
def log_message(self, *args):
# don't write log messages to stderr
pass
def read(self, n=None):
return ''
class TestServerThread(threading.Thread):
def __init__(self, test_object, request_handler):

View File

@ -944,7 +944,7 @@ else:
url = 'https://%s:%d/%s' % (
HOST, server.port, os.path.split(CERTFILE)[1])
f = urllib.urlopen(url)
dlen = f.info().getheader("content-length")
dlen = f.info().get("content-length")
if dlen and (int(dlen) > 0):
d2 = f.read(int(dlen))
if support.verbose:

View File

@ -2,11 +2,11 @@
import urllib
import http.client
import email.message
import io
import unittest
from test import support
import os
import mimetools
import tempfile
def hexescape(char):
@ -78,7 +78,7 @@ class urlopen_FileTests(unittest.TestCase):
self.returned_obj.close()
def test_info(self):
self.assert_(isinstance(self.returned_obj.info(), mimetools.Message))
self.assert_(isinstance(self.returned_obj.info(), email.message.Message))
def test_geturl(self):
self.assertEqual(self.returned_obj.geturl(), self.pathname)
@ -206,8 +206,8 @@ class urlretrieve_FileTests(unittest.TestCase):
# a headers value is returned.
result = urllib.urlretrieve("file:%s" % support.TESTFN)
self.assertEqual(result[0], support.TESTFN)
self.assert_(isinstance(result[1], mimetools.Message),
"did not get a mimetools.Message instance as second "
self.assert_(isinstance(result[1], email.message.Message),
"did not get a email.message.Message instance as second "
"returned value")
def test_copy(self):

View File

@ -349,18 +349,18 @@ class MockHTTPHandler(urllib2.BaseHandler):
self._count = 0
self.requests = []
def http_open(self, req):
import mimetools, http.client, copy
import email, http.client, copy
from io import StringIO
self.requests.append(copy.deepcopy(req))
if self._count == 0:
self._count = self._count + 1
name = http.client.responses[self.code]
msg = mimetools.Message(StringIO(self.headers))
msg = email.message_from_string(self.headers)
return self.parent.error(
"http", req, MockFile(), self.code, name, msg)
else:
self.req = req
msg = mimetools.Message(StringIO("\r\n\r\n"))
msg = email.message_from_string("\r\n\r\n")
return MockResponse(200, "OK", msg, "", req.get_full_url())
class MockPasswordManager:

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
import mimetools
import email
import threading
import urlparse
import urllib2
@ -443,10 +443,10 @@ class TestUrlopen(unittest.TestCase):
try:
open_url = urllib2.urlopen("http://localhost:%s" % handler.port)
info_obj = open_url.info()
self.assert_(isinstance(info_obj, mimetools.Message),
self.assert_(isinstance(info_obj, email.message.Message),
"object returned by 'info' is not an instance of "
"mimetools.Message")
self.assertEqual(info_obj.getsubtype(), "plain")
"email.message.Message")
self.assertEqual(info_obj.get_content_subtype(), "plain")
finally:
self.server.stop()

View File

@ -8,7 +8,6 @@ import socket
import urllib2
import sys
import os
import mimetools
def _retry_thrice(func, exc, *args, **kwargs):

View File

@ -7,7 +7,7 @@ import socket
import urllib
import sys
import os
import mimetools
import email.message
def _open_with_retry(func, host, *args, **kwargs):
@ -87,10 +87,10 @@ class urlopenNetworkTests(unittest.TestCase):
info_obj = open_url.info()
finally:
open_url.close()
self.assert_(isinstance(info_obj, mimetools.Message),
self.assert_(isinstance(info_obj, email.message.Message),
"object returned by 'info' is not an instance of "
"mimetools.Message")
self.assertEqual(info_obj.getsubtype(), "html")
"email.message.Message")
self.assertEqual(info_obj.get_content_subtype(), "html")
def test_geturl(self):
# Make sure same URL as opened is returned by geturl.
@ -180,8 +180,8 @@ class urlretrieveNetworkTests(unittest.TestCase):
# Make sure header returned as 2nd value from urlretrieve is good.
file_location, header = self.urlretrieve("http://www.python.org/")
os.unlink(file_location)
self.assert_(isinstance(header, mimetools.Message),
"header is not an instance of mimetools.Message")
self.assert_(isinstance(header, email.message.Message),
"header is not an instance of email.message.Message")

View File

@ -6,7 +6,6 @@ import unittest
import xmlrpc.client as xmlrpclib
import xmlrpc.server
import threading
import mimetools
import http.client
import socket
import os
@ -452,12 +451,12 @@ class SimpleServerTestCase(unittest.TestCase):
# This is a contrived way to make a failure occur on the server side
# in order to test the _send_traceback_header flag on the server
class FailingMessageClass(mimetools.Message):
def __getitem__(self, key):
class FailingMessageClass(http.client.HTTPMessage):
def get(self, key, failobj=None):
key = key.lower()
if key == 'content-length':
return 'I am broken'
return mimetools.Message.__getitem__(self, key)
return super().get(key, failobj)
class FailingServerTestCase(unittest.TestCase):
@ -477,7 +476,8 @@ class FailingServerTestCase(unittest.TestCase):
# reset flag
xmlrpc.server.SimpleXMLRPCServer._send_traceback_header = False
# reset message class
xmlrpc.server.SimpleXMLRPCRequestHandler.MessageClass = mimetools.Message
default_class = http.client.HTTPMessage
xmlrpc.server.SimpleXMLRPCRequestHandler.MessageClass = default_class
def test_basic(self):
# check that flag is false by default
@ -529,8 +529,8 @@ class FailingServerTestCase(unittest.TestCase):
if not is_unavailable_exception(e) and hasattr(e, "headers"):
# We should get error info in the response
expected_err = "invalid literal for int() with base 10: 'I am broken'"
self.assertEqual(e.headers.get("x-exception"), expected_err)
self.assertTrue(e.headers.get("x-traceback") is not None)
self.assertEqual(e.headers.get("X-exception"), expected_err)
self.assertTrue(e.headers.get("X-traceback") is not None)
else:
self.fail('ProtocolError not raised')
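
The xmlrpc test now fakes a broken header by subclassing http.client.HTTPMessage and overriding get() rather than __getitem__. A standalone sketch of that technique, with a hypothetical class name and made-up values:

    import http.client

    class BrokenLengthMessage(http.client.HTTPMessage):
        # misreport Content-Length, pass everything else through
        def get(self, key, failobj=None):
            if key.lower() == 'content-length':
                return 'I am broken'
            return super().get(key, failobj)

    m = BrokenLengthMessage()
    m['Content-Length'] = '42'
    m['Content-Type'] = 'text/xml'
    m.get('content-length')    # 'I am broken'
    m.get('content-type')      # 'text/xml'
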

View File

@ -17,12 +17,14 @@ The object returned by URLopener().open(file) will differ per
protocol. All you know is that is has methods read(), readline(),
readlines(), fileno(), close() and info(). The read*(), fileno()
and close() methods work like those of open files.
The info() method returns a mimetools.Message object which can be
The info() method returns a email.message.Message object which can be
used to query various info about the object, if available.
(mimetools.Message objects are queried with the getheader() method.)
(email.message.Message objects provide a dict-like interface.)
"""
import http.client
import email.message
import email
import os
import socket
import sys
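
As the rewritten module docstring says, info() now hands back an email.message.Message with a dict-like interface instead of a mimetools.Message queried via getheader(). A sketch of that interface on hypothetical headers (not a real urlopen call):

    import email

    info = email.message_from_string("Content-Type: text/plain\n"
                                     "Content-Length: 7\n")
    info["Content-Length"]     # '7'   -- dict-style, case-insensitive access
    "content-type" in info     # True
    info.items()               # [('Content-Type', 'text/plain'), ('Content-Length', '7')]
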
@ -414,8 +416,7 @@ class URLopener:
def open_local_file(self, url):
"""Use local file."""
import mimetypes, mimetools, email.utils
from io import StringIO
import mimetypes, email.utils
host, file = splithost(url)
localname = url2pathname(file)
try:
@ -425,9 +426,9 @@ class URLopener:
size = stats.st_size
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
mtype = mimetypes.guess_type(url)[0]
headers = mimetools.Message(StringIO(
headers = email.message_from_string(
'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
(mtype or 'text/plain', size, modified)))
(mtype or 'text/plain', size, modified))
if not host:
urlfile = file
if file[:1] == '/':
@ -448,8 +449,7 @@ class URLopener:
"""Use FTP protocol."""
if not isinstance(url, str):
raise IOError('ftp error', 'proxy support for ftp protocol currently not implemented')
import mimetypes, mimetools
from io import StringIO
import mimetypes
host, path = splithost(url)
if not host: raise IOError('ftp error', 'no host given')
host, port = splitport(host)
@ -498,7 +498,7 @@ class URLopener:
headers += "Content-Type: %s\n" % mtype
if retrlen is not None and retrlen >= 0:
headers += "Content-Length: %d\n" % retrlen
headers = mimetools.Message(StringIO(headers))
headers = email.message_from_string(headers)
return addinfourl(fp, headers, "ftp:" + url)
except ftperrors() as msg:
raise IOError('ftp error', msg).with_traceback(sys.exc_info()[2])
@ -514,7 +514,6 @@ class URLopener:
# mediatype := [ type "/" subtype ] *( ";" parameter )
# data := *urlchar
# parameter := attribute "=" value
import mimetools
from io import StringIO
try:
[type, data] = url.split(',', 1)
@ -541,8 +540,8 @@ class URLopener:
msg.append('')
msg.append(data)
msg = '\n'.join(msg)
headers = email.message_from_string(msg)
f = StringIO(msg)
headers = mimetools.Message(f, 0)
#f.fileno = None # needed for addinfourl
return addinfourl(f, headers, url)
@ -761,13 +760,10 @@ def ftperrors():
_noheaders = None
def noheaders():
"""Return an empty mimetools.Message object."""
"""Return an empty email.message.Message object."""
global _noheaders
if _noheaders is None:
import mimetools
from io import StringIO
_noheaders = mimetools.Message(StringIO(), 0)
_noheaders.fp.close() # Recycle file descriptor
_noheaders = email.message.Message()
return _noheaders

View File

@ -91,7 +91,7 @@ import base64
import hashlib
import http.client
import io
import mimetools
import email
import os
import posixpath
import random
@ -549,9 +549,9 @@ class HTTPRedirectHandler(BaseHandler):
# Some servers (incorrectly) return multiple Location headers
# (so probably same goes for URI). Use first header.
if 'location' in headers:
newurl = headers.getheaders('location')[0]
newurl = headers['location']
elif 'uri' in headers:
newurl = headers.getheaders('uri')[0]
newurl = headers['uri']
else:
return
newurl = urlparse.urljoin(req.get_full_url(), newurl)
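
The redirect handler reads a single Location (or URI) value with plain dict-style access now that headers is an email.message.Message; get_all() remains available when every occurrence is wanted. A sketch with a hypothetical redirect response:

    import email

    headers = email.message_from_string(
        "Location: http://example.com/new\n"
        "Location: http://example.com/other\n")
    headers["location"]            # 'http://example.com/new' -- first value wins
    headers.get_all("location")    # both values, old getheaders()-style
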
@ -1050,7 +1050,7 @@ class AbstractHTTPHandler(BaseHandler):
http_class must implement the HTTPConnection API from http.client.
The addinfourl return value is a file-like object. It also
has methods and attributes including:
- info(): return a mimetools.Message object for the headers
- info(): return a email.message.Message object for the headers
- geturl(): return the original request URL
- code: HTTP status code
"""
@ -1140,6 +1140,10 @@ def parse_keqv_list(l):
"""Parse list of key=value strings where keys are not duplicated."""
parsed = {}
for elt in l:
# Because of a trailing comma in the auth string, elt could be the
# empty string.
if not elt:
continue
k, v = elt.split('=', 1)
if v[0] == '"' and v[-1] == '"':
v = v[1:-1]
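
The parse_keqv_list() hunk adds a guard for the empty element that a trailing comma in an auth challenge produces. A standalone sketch of the loop with that guard, on a made-up challenge string:

    # hypothetical digest-auth fragment with a trailing comma
    items = 'realm="x", nonce="y", '.split(', ')   # ['realm="x"', 'nonce="y"', '']

    parsed = {}
    for elt in items:
        if not elt:                 # the new guard: skip the empty tail
            continue
        k, v = elt.split('=', 1)
        if v[0] == '"' and v[-1] == '"':
            v = v[1:-1]
        parsed[k] = v

    parsed    # {'realm': 'x', 'nonce': 'y'}
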
@ -1222,9 +1226,9 @@ class FileHandler(BaseHandler):
size = stats.st_size
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
mtype = mimetypes.guess_type(file)[0]
headers = mimetools.Message(StringIO(
headers = email.message_from_string(
'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
(mtype or 'text/plain', size, modified)))
(mtype or 'text/plain', size, modified))
if host:
host, port = splitport(host)
if not host or \
@ -1290,8 +1294,8 @@ class FTPHandler(BaseHandler):
headers += "Content-type: %s\n" % mtype
if retrlen is not None and retrlen >= 0:
headers += "Content-length: %d\n" % retrlen
headers = email.message_from_string(headers)
sf = StringIO(headers)
headers = mimetools.Message(sf)
return addinfourl(fp, headers, req.get_full_url())
except ftplib.all_errors as msg:
raise URLError('ftp error: %s' % msg).with_traceback(sys.exc_info()[2])

View File

@ -101,16 +101,16 @@ class WSGIRequestHandler(BaseHTTPRequestHandler):
env['REMOTE_HOST'] = host
env['REMOTE_ADDR'] = self.client_address[0]
if self.headers.typeheader is None:
env['CONTENT_TYPE'] = self.headers.type
if self.headers.get('content-type') is None:
env['CONTENT_TYPE'] = self.headers.get_content_type()
else:
env['CONTENT_TYPE'] = self.headers.typeheader
env['CONTENT_TYPE'] = self.headers['content-type']
length = self.headers.getheader('content-length')
length = self.headers.get('content-length')
if length:
env['CONTENT_LENGTH'] = length
for h in self.headers.headers:
for h in self.headers:
k,v = h.split(':',1)
k=k.replace('-','_').upper(); v=v.strip()
if k in env:

View File

@ -14,7 +14,7 @@ class ErrorMessage(rfc822.Message):
self.sub = ''
def is_warning(self):
sub = self.getheader('Subject')
sub = self.get('Subject')
if not sub:
return 0
sub = sub.lower()

View File

@ -53,7 +53,7 @@ def _check1version(package, url, version, verbose=0):
print(' Cannot open:', arg)
return -1, None, None
msg = rfc822.Message(fp, seekable=0)
newversion = msg.getheader('current-version')
newversion = msg.get('current-version')
if not newversion:
if verbose >= VERBOSE_EACHFILE:
print(' No "Current-Version:" header in URL or URL not found')