# regression test for SAX 2.0  -*- coding: utf-8 -*-
# $Id$

from xml.sax import make_parser, ContentHandler, \
                    SAXException, SAXReaderNotAvailable, SAXParseException, \
                    saxutils
try:
    make_parser()
except SAXReaderNotAvailable:
    # don't try to test this module if we cannot create a parser
    raise ImportError("no XML parsers available")
from xml.sax.saxutils import XMLGenerator, escape, unescape, quoteattr, \
                             XMLFilterBase, prepare_input_source
from xml.sax.expatreader import create_parser
from xml.sax.handler import feature_namespaces
from xml.sax.xmlreader import InputSource, AttributesImpl, AttributesNSImpl
from cStringIO import StringIO
import io
import gc
import os.path
import shutil
import test.test_support as support
from test.test_support import findfile, run_unittest, TESTFN
import unittest

TEST_XMLFILE = findfile("test.xml", subdir="xmltestdata")
TEST_XMLFILE_OUT = findfile("test.xml.out", subdir="xmltestdata")

supports_unicode_filenames = True
if not os.path.supports_unicode_filenames:
    try:
        support.TESTFN_UNICODE.encode(support.TESTFN_ENCODING)
    except (AttributeError, UnicodeError, TypeError):
        # Either the file system encoding is None, or the file name
        # cannot be encoded in the file system encoding.
        supports_unicode_filenames = False

requires_unicode_filenames = unittest.skipUnless(
    supports_unicode_filenames,
    'Requires unicode filenames support')

ns_uri = "http://www.python.org/xml-ns/saxtest/"

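# Shared assertion helpers for the Attributes/AttributesNS interfaces;
# ExpatReaderTest and XmlReaderTest below reuse them on attribute objects
# produced by the expat reader and built directly from xmlreader.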
class XmlTestBase(unittest.TestCase):
    def verify_empty_attrs(self, attrs):
        self.assertRaises(KeyError, attrs.getValue, "attr")
        self.assertRaises(KeyError, attrs.getValueByQName, "attr")
        self.assertRaises(KeyError, attrs.getNameByQName, "attr")
        self.assertRaises(KeyError, attrs.getQNameByName, "attr")
        self.assertRaises(KeyError, attrs.__getitem__, "attr")
        self.assertEqual(attrs.getLength(), 0)
        self.assertEqual(attrs.getNames(), [])
        self.assertEqual(attrs.getQNames(), [])
        self.assertEqual(len(attrs), 0)
        self.assertFalse(attrs.has_key("attr"))
        self.assertEqual(attrs.keys(), [])
        self.assertEqual(attrs.get("attrs"), None)
        self.assertEqual(attrs.get("attrs", 25), 25)
        self.assertEqual(attrs.items(), [])
        self.assertEqual(attrs.values(), [])

    def verify_empty_nsattrs(self, attrs):
        self.assertRaises(KeyError, attrs.getValue, (ns_uri, "attr"))
        self.assertRaises(KeyError, attrs.getValueByQName, "ns:attr")
        self.assertRaises(KeyError, attrs.getNameByQName, "ns:attr")
        self.assertRaises(KeyError, attrs.getQNameByName, (ns_uri, "attr"))
        self.assertRaises(KeyError, attrs.__getitem__, (ns_uri, "attr"))
        self.assertEqual(attrs.getLength(), 0)
        self.assertEqual(attrs.getNames(), [])
        self.assertEqual(attrs.getQNames(), [])
        self.assertEqual(len(attrs), 0)
        self.assertFalse(attrs.has_key((ns_uri, "attr")))
        self.assertEqual(attrs.keys(), [])
        self.assertEqual(attrs.get((ns_uri, "attr")), None)
        self.assertEqual(attrs.get((ns_uri, "attr"), 25), 25)
        self.assertEqual(attrs.items(), [])
        self.assertEqual(attrs.values(), [])

    def verify_attrs_wattr(self, attrs):
        self.assertEqual(attrs.getLength(), 1)
        self.assertEqual(attrs.getNames(), ["attr"])
        self.assertEqual(attrs.getQNames(), ["attr"])
        self.assertEqual(len(attrs), 1)
        self.assertTrue(attrs.has_key("attr"))
        self.assertEqual(attrs.keys(), ["attr"])
        self.assertEqual(attrs.get("attr"), "val")
        self.assertEqual(attrs.get("attr", 25), "val")
        self.assertEqual(attrs.items(), [("attr", "val")])
        self.assertEqual(attrs.values(), ["val"])
        self.assertEqual(attrs.getValue("attr"), "val")
        self.assertEqual(attrs.getValueByQName("attr"), "val")
        self.assertEqual(attrs.getNameByQName("attr"), "attr")
        self.assertEqual(attrs["attr"], "val")
        self.assertEqual(attrs.getQNameByName("attr"), "attr")

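# Helpers that render the test document as a unicode string, as encoded
# bytes, or as a file on disk.  decl_encoding=Ellipsis means "declare the
# real encoding in the XML declaration"; None means "emit no declaration".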
def xml_unicode(doc, encoding=None):
    if encoding is None:
        return doc
    return u'<?xml version="1.0" encoding="%s"?>\n%s' % (encoding, doc)

def xml_bytes(doc, encoding, decl_encoding=Ellipsis):
    if decl_encoding is Ellipsis:
        decl_encoding = encoding
    return xml_unicode(doc, decl_encoding).encode(encoding, 'xmlcharrefreplace')

def make_xml_file(doc, encoding, decl_encoding=Ellipsis):
    if decl_encoding is Ellipsis:
        decl_encoding = encoding
    with io.open(TESTFN, 'w', encoding=encoding, errors='xmlcharrefreplace') as f:
        f.write(xml_unicode(doc, decl_encoding))

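# ParseTest pushes the same document through xml.sax.parse()/parseString()
# as in-memory bytes, file names and open binary files, in several
# encodings, and checks the regenerated output against xml_bytes().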
class ParseTest(unittest.TestCase):
    data = support.u(r'<money value="$\xa3\u20ac\U0001017b">'
                     r'$\xa3\u20ac\U0001017b</money>')

    def tearDown(self):
        support.unlink(TESTFN)

    def check_parse(self, f):
        from xml.sax import parse
        result = StringIO()
        parse(f, XMLGenerator(result, 'utf-8'))
        self.assertEqual(result.getvalue(), xml_bytes(self.data, 'utf-8'))

    def test_parse_bytes(self):
        # UTF-8 is the default encoding, US-ASCII is compatible with UTF-8,
        # UTF-16 is autodetected
        encodings = ('us-ascii', 'utf-8', 'utf-16', 'utf-16le', 'utf-16be')
        for encoding in encodings:
            self.check_parse(io.BytesIO(xml_bytes(self.data, encoding)))
            make_xml_file(self.data, encoding)
            self.check_parse(TESTFN)
            with io.open(TESTFN, 'rb') as f:
                self.check_parse(f)
            self.check_parse(io.BytesIO(xml_bytes(self.data, encoding, None)))
            make_xml_file(self.data, encoding, None)
            self.check_parse(TESTFN)
            with io.open(TESTFN, 'rb') as f:
                self.check_parse(f)
        # accept UTF-8 with BOM
        self.check_parse(io.BytesIO(xml_bytes(self.data, 'utf-8-sig', 'utf-8')))
        make_xml_file(self.data, 'utf-8-sig', 'utf-8')
        self.check_parse(TESTFN)
        with io.open(TESTFN, 'rb') as f:
            self.check_parse(f)
        self.check_parse(io.BytesIO(xml_bytes(self.data, 'utf-8-sig', None)))
        make_xml_file(self.data, 'utf-8-sig', None)
        self.check_parse(TESTFN)
        with io.open(TESTFN, 'rb') as f:
            self.check_parse(f)
        # accept data with declared encoding
        self.check_parse(io.BytesIO(xml_bytes(self.data, 'iso-8859-1')))
        make_xml_file(self.data, 'iso-8859-1')
        self.check_parse(TESTFN)
        with io.open(TESTFN, 'rb') as f:
            self.check_parse(f)
        # fail on data that is not UTF-8 compatible and has no declared encoding
        with self.assertRaises(SAXException):
            self.check_parse(io.BytesIO(xml_bytes(self.data, 'iso-8859-1', None)))
        make_xml_file(self.data, 'iso-8859-1', None)
        with self.assertRaises(SAXException):
            self.check_parse(TESTFN)
        with io.open(TESTFN, 'rb') as f:
            with self.assertRaises(SAXException):
                self.check_parse(f)

    def test_parse_InputSource(self):
        # accept data without declared but with explicitly specified encoding
        make_xml_file(self.data, 'iso-8859-1', None)
        with io.open(TESTFN, 'rb') as f:
            input = InputSource()
            input.setByteStream(f)
            input.setEncoding('iso-8859-1')
            self.check_parse(input)

    def test_parse_close_source(self):
        builtin_open = open
        non_local = {'fileobj': None}

        def mock_open(*args):
            fileobj = builtin_open(*args)
            non_local['fileobj'] = fileobj
            return fileobj

        with support.swap_attr(saxutils, 'open', mock_open):
            make_xml_file(self.data, 'iso-8859-1', None)
            with self.assertRaises(SAXException):
                self.check_parse(TESTFN)
            self.assertTrue(non_local['fileobj'].closed)

    def check_parseString(self, s):
        from xml.sax import parseString
        result = StringIO()
        parseString(s, XMLGenerator(result, 'utf-8'))
        self.assertEqual(result.getvalue(), xml_bytes(self.data, 'utf-8'))

    def test_parseString_bytes(self):
        # UTF-8 is the default encoding, US-ASCII is compatible with UTF-8,
        # UTF-16 is autodetected
        encodings = ('us-ascii', 'utf-8', 'utf-16', 'utf-16le', 'utf-16be')
        for encoding in encodings:
            self.check_parseString(xml_bytes(self.data, encoding))
            self.check_parseString(xml_bytes(self.data, encoding, None))
        # accept UTF-8 with BOM
        self.check_parseString(xml_bytes(self.data, 'utf-8-sig', 'utf-8'))
        self.check_parseString(xml_bytes(self.data, 'utf-8-sig', None))
        # accept data with declared encoding
        self.check_parseString(xml_bytes(self.data, 'iso-8859-1'))
        # fail on data that is not UTF-8 compatible and has no declared encoding
        with self.assertRaises(SAXException):
            self.check_parseString(xml_bytes(self.data, 'iso-8859-1', None))

class MakeParserTest(unittest.TestCase):
    def test_make_parser2(self):
        # Creating parsers several times in a row should succeed.
        # Testing this because there have been failures of this kind
        # before.
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()


# ===========================================================================
#
# saxutils tests
#
# ===========================================================================

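# escape(), unescape() and quoteattr() are plain string helpers; the expected
# values below spell out the XML character entities (&amp;, &lt;, &gt;,
# &quot;) that they produce or consume.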
class SaxutilsTest(unittest.TestCase):
    # ===== escape
    def test_escape_basic(self):
        self.assertEqual(escape("Donald Duck & Co"), "Donald Duck &amp; Co")

    def test_escape_all(self):
        self.assertEqual(escape("<Donald Duck & Co>"),
                         "&lt;Donald Duck &amp; Co&gt;")

    def test_escape_extra(self):
        self.assertEqual(escape("Hei på deg", {"å": "&aring;"}),
                         "Hei p&aring; deg")

    # ===== unescape
    def test_unescape_basic(self):
        self.assertEqual(unescape("Donald Duck &amp; Co"), "Donald Duck & Co")

    def test_unescape_all(self):
        self.assertEqual(unescape("&lt;Donald Duck &amp; Co&gt;"),
                         "<Donald Duck & Co>")

    def test_unescape_extra(self):
        self.assertEqual(unescape("Hei p&aring; deg", {"&aring;": "å"}),
                         "Hei på deg")

    def test_unescape_amp_extra(self):
        self.assertEqual(unescape("&amp;foo;", {"&foo;": "splat"}), "&foo;")

    # ===== quoteattr
    def test_quoteattr_basic(self):
        self.assertEqual(quoteattr("Donald Duck & Co"),
                         '"Donald Duck &amp; Co"')

    def test_single_quoteattr(self):
        self.assertEqual(quoteattr('Includes "double" quotes'),
                         '\'Includes "double" quotes\'')

    def test_double_quoteattr(self):
        self.assertEqual(quoteattr("Includes 'single' quotes"),
                         "\"Includes 'single' quotes\"")

    def test_single_double_quoteattr(self):
        self.assertEqual(quoteattr("Includes 'single' and \"double\" quotes"),
                         "\"Includes 'single' and &quot;double&quot; quotes\"")

    # ===== make_parser
    def test_make_parser(self):
        # Creating a parser should succeed - it should fall back
        # to the expatreader
        p = make_parser(['xml.parsers.no_such_parser'])


class PrepareInputSourceTest(unittest.TestCase):

    def setUp(self):
        self.file = support.TESTFN
        with open(self.file, "w") as tmp:
            tmp.write("This was read from a file.")

    def tearDown(self):
        support.unlink(self.file)

    def make_byte_stream(self):
        return io.BytesIO(b"This is a byte stream.")

    def checkContent(self, stream, content):
        self.assertIsNotNone(stream)
        self.assertEqual(stream.read(), content)
        stream.close()

    def test_byte_stream(self):
        # If the source is an InputSource that does not have a character
        # stream but does have a byte stream, use the byte stream.
        src = InputSource(self.file)
        src.setByteStream(self.make_byte_stream())
        prep = prepare_input_source(src)
        self.assertIsNone(prep.getCharacterStream())
        self.checkContent(prep.getByteStream(),
                          b"This is a byte stream.")

    def test_system_id(self):
        # If the source is an InputSource that has neither a character
        # stream nor a byte stream, open the system ID.
        src = InputSource(self.file)
        prep = prepare_input_source(src)
        self.assertIsNone(prep.getCharacterStream())
        self.checkContent(prep.getByteStream(),
                          b"This was read from a file.")

    def test_string(self):
        # If the source is a string, use it as a system ID and open it.
        prep = prepare_input_source(self.file)
        self.assertIsNone(prep.getCharacterStream())
        self.checkContent(prep.getByteStream(),
                          b"This was read from a file.")

    def test_binary_file(self):
        # If the source is a binary file-like object, use it as a byte
        # stream.
        prep = prepare_input_source(self.make_byte_stream())
        self.assertIsNone(prep.getCharacterStream())
        self.checkContent(prep.getByteStream(),
                          b"This is a byte stream.")

# ===== XMLGenerator

start = '<?xml version="1.0" encoding="iso-8859-1"?>\n'

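# Mixin holding the XMLGenerator tests proper; the concrete classes further
# down bind self.ioclass to a particular output object (cStringIO,
# io.BytesIO, or a minimal write()-only writer).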
class XmlgenTest:
    def test_xmlgen_basic(self):
        result = self.ioclass()
        gen = XMLGenerator(result)
        gen.startDocument()
        gen.startElement("doc", {})
        gen.endElement("doc")
        gen.endDocument()

        self.assertEqual(result.getvalue(), start + "<doc></doc>")

    def test_xmlgen_content(self):
        result = self.ioclass()
        gen = XMLGenerator(result)

        gen.startDocument()
        gen.startElement("doc", {})
        gen.characters("huhei")
        gen.endElement("doc")
        gen.endDocument()

        self.assertEqual(result.getvalue(), start + "<doc>huhei</doc>")

    def test_xmlgen_pi(self):
        result = self.ioclass()
        gen = XMLGenerator(result)

        gen.startDocument()
        gen.processingInstruction("test", "data")
        gen.startElement("doc", {})
        gen.endElement("doc")
        gen.endDocument()

        self.assertEqual(result.getvalue(), start + "<?test data?><doc></doc>")

    def test_xmlgen_content_escape(self):
        result = self.ioclass()
        gen = XMLGenerator(result)

        gen.startDocument()
        gen.startElement("doc", {})
        gen.characters("<huhei&")
        gen.endElement("doc")
        gen.endDocument()

        self.assertEqual(result.getvalue(),
                         start + "<doc>&lt;huhei&amp;</doc>")

    def test_xmlgen_attr_escape(self):
        result = self.ioclass()
        gen = XMLGenerator(result)

        gen.startDocument()
        gen.startElement("doc", {"a": '"'})
        gen.startElement("e", {"a": "'"})
        gen.endElement("e")
        gen.startElement("e", {"a": "'\""})
        gen.endElement("e")
        gen.startElement("e", {"a": "\n\r\t"})
        gen.endElement("e")
        gen.endElement("doc")
        gen.endDocument()

        self.assertEqual(result.getvalue(), start +
            ("<doc a='\"'><e a=\"'\"></e>"
             "<e a=\"'&quot;\"></e>"
             "<e a=\"&#10;&#13;&#9;\"></e></doc>"))

    def test_xmlgen_encoding(self):
        encodings = ('iso-8859-15', 'utf-8',
                     'utf-16be', 'utf-16le',
                     'utf-32be', 'utf-32le')
        for encoding in encodings:
            result = self.ioclass()
            gen = XMLGenerator(result, encoding=encoding)

            gen.startDocument()
            gen.startElement("doc", {"a": u'\u20ac'})
            gen.characters(u"\u20ac")
            gen.endElement("doc")
            gen.endDocument()

            self.assertEqual(result.getvalue(), (
                u'<?xml version="1.0" encoding="%s"?>\n'
                u'<doc a="\u20ac">\u20ac</doc>' % encoding
                ).encode(encoding, 'xmlcharrefreplace'))

    def test_xmlgen_unencodable(self):
        result = self.ioclass()
        gen = XMLGenerator(result, encoding='ascii')

        gen.startDocument()
        gen.startElement("doc", {"a": u'\u20ac'})
        gen.characters(u"\u20ac")
        gen.endElement("doc")
        gen.endDocument()

        self.assertEqual(result.getvalue(),
                         '<?xml version="1.0" encoding="ascii"?>\n'
                         '<doc a="&#8364;">&#8364;</doc>')

    def test_xmlgen_ignorable(self):
        result = self.ioclass()
        gen = XMLGenerator(result)

        gen.startDocument()
        gen.startElement("doc", {})
        gen.ignorableWhitespace(" ")
        gen.endElement("doc")
        gen.endDocument()

        self.assertEqual(result.getvalue(), start + "<doc> </doc>")

    def test_xmlgen_encoding_bytes(self):
        encodings = ('iso-8859-15', 'utf-8',
                     'utf-16be', 'utf-16le',
                     'utf-32be', 'utf-32le')
        for encoding in encodings:
            result = self.ioclass()
            gen = XMLGenerator(result, encoding=encoding)

            gen.startDocument()
            gen.startElement("doc", {"a": u'\u20ac'})
            gen.characters(u"\u20ac".encode(encoding))
            gen.ignorableWhitespace(" ".encode(encoding))
            gen.endElement("doc")
            gen.endDocument()

            self.assertEqual(result.getvalue(), (
                u'<?xml version="1.0" encoding="%s"?>\n'
                u'<doc a="\u20ac">\u20ac </doc>' % encoding
                ).encode(encoding, 'xmlcharrefreplace'))

    def test_xmlgen_ns(self):
        result = self.ioclass()
        gen = XMLGenerator(result)

        gen.startDocument()
        gen.startPrefixMapping("ns1", ns_uri)
        gen.startElementNS((ns_uri, "doc"), "ns1:doc", {})
        # add an unqualified name
        gen.startElementNS((None, "udoc"), None, {})
        gen.endElementNS((None, "udoc"), None)
        gen.endElementNS((ns_uri, "doc"), "ns1:doc")
        gen.endPrefixMapping("ns1")
        gen.endDocument()

        self.assertEqual(result.getvalue(), start + \
            ('<ns1:doc xmlns:ns1="%s"><udoc></udoc></ns1:doc>' %
             ns_uri))

    def test_1463026_1(self):
        result = self.ioclass()
        gen = XMLGenerator(result)

        gen.startDocument()
        gen.startElementNS((None, 'a'), 'a', {(None, 'b'): 'c'})
        gen.endElementNS((None, 'a'), 'a')
        gen.endDocument()

        self.assertEqual(result.getvalue(), start + '<a b="c"></a>')

    def test_1463026_2(self):
        result = self.ioclass()
        gen = XMLGenerator(result)

        gen.startDocument()
        gen.startPrefixMapping(None, 'qux')
        gen.startElementNS(('qux', 'a'), 'a', {})
        gen.endElementNS(('qux', 'a'), 'a')
        gen.endPrefixMapping(None)
        gen.endDocument()

        self.assertEqual(result.getvalue(), start + '<a xmlns="qux"></a>')

    def test_1463026_3(self):
        result = self.ioclass()
        gen = XMLGenerator(result)

        gen.startDocument()
        gen.startPrefixMapping('my', 'qux')
        gen.startElementNS(('qux', 'a'), 'a', {(None, 'b'): 'c'})
        gen.endElementNS(('qux', 'a'), 'a')
        gen.endPrefixMapping('my')
        gen.endDocument()

        self.assertEqual(result.getvalue(),
                         start + '<my:a xmlns:my="qux" b="c"></my:a>')

    def test_5027_1(self):
        # The xml prefix (as in xml:lang below) is reserved and bound by
        # definition to http://www.w3.org/XML/1998/namespace. XMLGenerator
        # had a bug whereby a KeyError was raised because this namespace
        # was missing from a dictionary.
        #
        # This test demonstrates the bug by parsing a document.
        test_xml = StringIO(
            '<?xml version="1.0"?>'
            '<a:g1 xmlns:a="http://example.com/ns">'
             '<a:g2 xml:lang="en">Hello</a:g2>'
            '</a:g1>')

        parser = make_parser()
        parser.setFeature(feature_namespaces, True)
        result = self.ioclass()
        gen = XMLGenerator(result)
        parser.setContentHandler(gen)
        parser.parse(test_xml)

        self.assertEqual(result.getvalue(),
                         start + (
                         '<a:g1 xmlns:a="http://example.com/ns">'
                          '<a:g2 xml:lang="en">Hello</a:g2>'
                         '</a:g1>'))

    def test_5027_2(self):
        # The xml prefix (as in xml:lang below) is reserved and bound by
        # definition to http://www.w3.org/XML/1998/namespace. XMLGenerator
        # had a bug whereby a KeyError was raised because this namespace
        # was missing from a dictionary.
        #
        # This test demonstrates the bug by direct manipulation of the
        # XMLGenerator.
        result = self.ioclass()
        gen = XMLGenerator(result)

        gen.startDocument()
        gen.startPrefixMapping('a', 'http://example.com/ns')
        gen.startElementNS(('http://example.com/ns', 'g1'), 'g1', {})
        lang_attr = {('http://www.w3.org/XML/1998/namespace', 'lang'): 'en'}
        gen.startElementNS(('http://example.com/ns', 'g2'), 'g2', lang_attr)
        gen.characters('Hello')
        gen.endElementNS(('http://example.com/ns', 'g2'), 'g2')
        gen.endElementNS(('http://example.com/ns', 'g1'), 'g1')
        gen.endPrefixMapping('a')
        gen.endDocument()

        self.assertEqual(result.getvalue(),
                         start + (
                         '<a:g1 xmlns:a="http://example.com/ns">'
                          '<a:g2 xml:lang="en">Hello</a:g2>'
                         '</a:g1>'))

    def test_no_close_file(self):
        result = self.ioclass()

        def func(out):
            gen = XMLGenerator(out)
            gen.startDocument()
            gen.startElement("doc", {})

        func(result)
        self.assertFalse(result.closed)

    def test_xmlgen_fragment(self):
        result = self.ioclass()
        gen = XMLGenerator(result)

        # Don't call gen.startDocument()
        gen.startElement("foo", {"a": "1.0"})
        gen.characters("Hello")
        gen.endElement("foo")
        gen.startElement("bar", {"b": "2.0"})
        gen.endElement("bar")
        # Don't call gen.endDocument()

        self.assertEqual(result.getvalue(),
                         '<foo a="1.0">Hello</foo><bar b="2.0"></bar>')

class StringXmlgenTest(XmlgenTest, unittest.TestCase):
    ioclass = StringIO


class BytesIOXmlgenTest(XmlgenTest, unittest.TestCase):
    ioclass = io.BytesIO

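# A minimal write()-only sink is enough for XMLGenerator: it only has to
# collect the chunks passed to write() and join them again in getvalue().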
class WriterXmlgenTest(XmlgenTest, unittest.TestCase):
    class ioclass(list):
        write = list.append
        closed = False

        def getvalue(self):
            return b''.join(self)


class XMLFilterBaseTest(unittest.TestCase):
    def test_filter_basic(self):
        result = StringIO()
        gen = XMLGenerator(result)
        filter = XMLFilterBase()
        filter.setContentHandler(gen)

        filter.startDocument()
        filter.startElement("doc", {})
        filter.characters("content")
        filter.ignorableWhitespace(" ")
        filter.endElement("doc")
        filter.endDocument()

        self.assertEqual(result.getvalue(), start + "<doc>content </doc>")

# ===========================================================================
#
# expatreader tests
#
# ===========================================================================

xml_test_out = open(TEST_XMLFILE_OUT).read()

class ExpatReaderTest(XmlTestBase):

    # ===== XMLReader support

    def test_expat_binary_file(self):
        parser = create_parser()
        result = StringIO()
        xmlgen = XMLGenerator(result)

        parser.setContentHandler(xmlgen)
        parser.parse(open(TEST_XMLFILE))

        self.assertEqual(result.getvalue(), xml_test_out)

    @requires_unicode_filenames
    def test_expat_file_unicode(self):
        fname = support.TESTFN_UNICODE
        shutil.copyfile(TEST_XMLFILE, fname)
        self.addCleanup(support.unlink, fname)

        parser = create_parser()
        result = StringIO()
        xmlgen = XMLGenerator(result)

        parser.setContentHandler(xmlgen)
        parser.parse(open(fname))

        self.assertEqual(result.getvalue(), xml_test_out)

    # ===== DTDHandler support

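    # TestDTDHandler simply records the notation and unparsed-entity
    # declarations it receives, so the test below can assert on them.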
    class TestDTDHandler:

        def __init__(self):
            self._notations = []
            self._entities = []

        def notationDecl(self, name, publicId, systemId):
            self._notations.append((name, publicId, systemId))

        def unparsedEntityDecl(self, name, publicId, systemId, ndata):
            self._entities.append((name, publicId, systemId, ndata))

    def test_expat_dtdhandler(self):
        parser = create_parser()
        handler = self.TestDTDHandler()
        parser.setDTDHandler(handler)

        parser.feed('<!DOCTYPE doc [\n')
        parser.feed(' <!ENTITY img SYSTEM "expat.gif" NDATA GIF>\n')
        parser.feed(' <!NOTATION GIF PUBLIC "-//CompuServe//NOTATION Graphics Interchange Format 89a//EN">\n')
        parser.feed(']>\n')
        parser.feed('<doc></doc>')
        parser.close()

        self.assertEqual(handler._notations,
            [("GIF", "-//CompuServe//NOTATION Graphics Interchange Format 89a//EN", None)])
        self.assertEqual(handler._entities, [("img", None, "expat.gif", "GIF")])

    # ===== EntityResolver support

    class TestEntityResolver:

        def resolveEntity(self, publicId, systemId):
            inpsrc = InputSource()
            inpsrc.setByteStream(StringIO("<entity/>"))
            return inpsrc

    def test_expat_entityresolver(self):
        parser = create_parser()
        parser.setEntityResolver(self.TestEntityResolver())
        result = StringIO()
        parser.setContentHandler(XMLGenerator(result))

        parser.feed('<!DOCTYPE doc [\n')
        parser.feed(' <!ENTITY test SYSTEM "whatever">\n')
        parser.feed(']>\n')
        parser.feed('<doc>&test;</doc>')
        parser.close()

        self.assertEqual(result.getvalue(), start +
                         "<doc><entity></entity></doc>")

    # ===== Attributes support

    class AttrGatherer(ContentHandler):

        def startElement(self, name, attrs):
            self._attrs = attrs

        def startElementNS(self, name, qname, attrs):
            self._attrs = attrs

    def test_expat_attrs_empty(self):
        parser = create_parser()
        gather = self.AttrGatherer()
        parser.setContentHandler(gather)

        parser.feed("<doc/>")
        parser.close()

        self.verify_empty_attrs(gather._attrs)

    def test_expat_attrs_wattr(self):
        parser = create_parser()
        gather = self.AttrGatherer()
        parser.setContentHandler(gather)

        parser.feed("<doc attr='val'/>")
        parser.close()

        self.verify_attrs_wattr(gather._attrs)

    def test_expat_nsattrs_empty(self):
        parser = create_parser(1)
        gather = self.AttrGatherer()
        parser.setContentHandler(gather)

        parser.feed("<doc/>")
        parser.close()

        self.verify_empty_nsattrs(gather._attrs)

    def test_expat_nsattrs_wattr(self):
        parser = create_parser(1)
        gather = self.AttrGatherer()
        parser.setContentHandler(gather)

        parser.feed("<doc xmlns:ns='%s' ns:attr='val'/>" % ns_uri)
        parser.close()

        attrs = gather._attrs

        self.assertEqual(attrs.getLength(), 1)
        self.assertEqual(attrs.getNames(), [(ns_uri, "attr")])
        self.assertTrue((attrs.getQNames() == [] or
                         attrs.getQNames() == ["ns:attr"]))
        self.assertEqual(len(attrs), 1)
        self.assertTrue(attrs.has_key((ns_uri, "attr")))
        self.assertEqual(attrs.get((ns_uri, "attr")), "val")
        self.assertEqual(attrs.get((ns_uri, "attr"), 25), "val")
        self.assertEqual(attrs.items(), [((ns_uri, "attr"), "val")])
        self.assertEqual(attrs.values(), ["val"])
        self.assertEqual(attrs.getValue((ns_uri, "attr")), "val")
        self.assertEqual(attrs[(ns_uri, "attr")], "val")

    # ===== InputSource support

    def test_expat_inpsource_filename(self):
        parser = create_parser()
        result = StringIO()
        xmlgen = XMLGenerator(result)

        parser.setContentHandler(xmlgen)
        parser.parse(TEST_XMLFILE)

        self.assertEqual(result.getvalue(), xml_test_out)

    def test_expat_inpsource_sysid(self):
        parser = create_parser()
        result = StringIO()
        xmlgen = XMLGenerator(result)

        parser.setContentHandler(xmlgen)
        parser.parse(InputSource(TEST_XMLFILE))

        self.assertEqual(result.getvalue(), xml_test_out)

    @requires_unicode_filenames
    def test_expat_inpsource_sysid_unicode(self):
        fname = support.TESTFN_UNICODE
        shutil.copyfile(TEST_XMLFILE, fname)
        self.addCleanup(support.unlink, fname)

        parser = create_parser()
        result = StringIO()
        xmlgen = XMLGenerator(result)

        parser.setContentHandler(xmlgen)
        parser.parse(InputSource(fname))

        self.assertEqual(result.getvalue(), xml_test_out)

    def test_expat_inpsource_byte_stream(self):
        parser = create_parser()
        result = StringIO()
        xmlgen = XMLGenerator(result)

        parser.setContentHandler(xmlgen)
        inpsrc = InputSource()
        inpsrc.setByteStream(open(TEST_XMLFILE))
        parser.parse(inpsrc)

        self.assertEqual(result.getvalue(), xml_test_out)

    # ===== IncrementalParser support

    def test_expat_incremental(self):
        result = StringIO()
        xmlgen = XMLGenerator(result)
        parser = create_parser()
        parser.setContentHandler(xmlgen)

        parser.feed("<doc>")
        parser.feed("</doc>")
        parser.close()

        self.assertEqual(result.getvalue(), start + "<doc></doc>")

    def test_expat_incremental_reset(self):
        result = StringIO()
        xmlgen = XMLGenerator(result)
        parser = create_parser()
        parser.setContentHandler(xmlgen)

        parser.feed("<doc>")
        parser.feed("text")

        result = StringIO()
        xmlgen = XMLGenerator(result)
        parser.setContentHandler(xmlgen)
        parser.reset()

        parser.feed("<doc>")
        parser.feed("text")
        parser.feed("</doc>")
        parser.close()

        self.assertEqual(result.getvalue(), start + "<doc>text</doc>")

    # ===== Locator support

    def test_expat_locator_noinfo(self):
        result = StringIO()
        xmlgen = XMLGenerator(result)
        parser = create_parser()
        parser.setContentHandler(xmlgen)

        parser.feed("<doc>")
        parser.feed("</doc>")
        parser.close()

        self.assertEqual(parser.getSystemId(), None)
        self.assertEqual(parser.getPublicId(), None)
        self.assertEqual(parser.getLineNumber(), 1)

    def test_expat_locator_withinfo(self):
        result = StringIO()
        xmlgen = XMLGenerator(result)
        parser = create_parser()
        parser.setContentHandler(xmlgen)
        parser.parse(TEST_XMLFILE)

        self.assertEqual(parser.getSystemId(), TEST_XMLFILE)
        self.assertEqual(parser.getPublicId(), None)

    @requires_unicode_filenames
    def test_expat_locator_withinfo_unicode(self):
        fname = support.TESTFN_UNICODE
        shutil.copyfile(TEST_XMLFILE, fname)
        self.addCleanup(support.unlink, fname)

        result = StringIO()
        xmlgen = XMLGenerator(result)
        parser = create_parser()
        parser.setContentHandler(xmlgen)
        parser.parse(fname)

        self.assertEqual(parser.getSystemId(), fname)
        self.assertEqual(parser.getPublicId(), None)

# ===========================================================================
#
# error reporting
#
# ===========================================================================

class ErrorReportingTest(unittest.TestCase):
    def test_expat_inpsource_location(self):
        parser = create_parser()
        parser.setContentHandler(ContentHandler())  # do nothing
        source = InputSource()
        source.setByteStream(StringIO("<foo bar foobar>"))  # ill-formed
        name = "a file name"
        source.setSystemId(name)
        try:
            parser.parse(source)
            self.fail()
        except SAXException, e:
            self.assertEqual(e.getSystemId(), name)

    def test_expat_incomplete(self):
        parser = create_parser()
        parser.setContentHandler(ContentHandler())  # do nothing
        self.assertRaises(SAXParseException, parser.parse, StringIO("<foo>"))
        self.assertEqual(parser.getColumnNumber(), 5)
        self.assertEqual(parser.getLineNumber(), 1)

    def test_sax_parse_exception_str(self):
        # pass various values from a locator to the SAXParseException to
        # make sure that the __str__() doesn't fall apart when None is
        # passed instead of an integer line and column number
        #
        # use "normal" values for the locator:
        str(SAXParseException("message", None,
                              self.DummyLocator(1, 1)))
        # use None for the line number:
        str(SAXParseException("message", None,
                              self.DummyLocator(None, 1)))
        # use None for the column number:
        str(SAXParseException("message", None,
                              self.DummyLocator(1, None)))
        # use None for both:
        str(SAXParseException("message", None,
                              self.DummyLocator(None, None)))

    class DummyLocator:
        def __init__(self, lineno, colno):
            self._lineno = lineno
            self._colno = colno

        def getPublicId(self):
            return "pubid"

        def getSystemId(self):
            return "sysid"

        def getLineNumber(self):
            return self._lineno

        def getColumnNumber(self):
            return self._colno

# ===========================================================================
#
# xmlreader tests
#
# ===========================================================================

class XmlReaderTest(XmlTestBase):

    # ===== AttributesImpl
    def test_attrs_empty(self):
        self.verify_empty_attrs(AttributesImpl({}))

    def test_attrs_wattr(self):
        self.verify_attrs_wattr(AttributesImpl({"attr": "val"}))

    def test_nsattrs_empty(self):
        self.verify_empty_nsattrs(AttributesNSImpl({}, {}))

    def test_nsattrs_wattr(self):
        attrs = AttributesNSImpl({(ns_uri, "attr"): "val"},
                                 {(ns_uri, "attr"): "ns:attr"})

        self.assertEqual(attrs.getLength(), 1)
        self.assertEqual(attrs.getNames(), [(ns_uri, "attr")])
        self.assertEqual(attrs.getQNames(), ["ns:attr"])
        self.assertEqual(len(attrs), 1)
        self.assertTrue(attrs.has_key((ns_uri, "attr")))
        self.assertEqual(attrs.keys(), [(ns_uri, "attr")])
        self.assertEqual(attrs.get((ns_uri, "attr")), "val")
        self.assertEqual(attrs.get((ns_uri, "attr"), 25), "val")
        self.assertEqual(attrs.items(), [((ns_uri, "attr"), "val")])
        self.assertEqual(attrs.values(), ["val"])
        self.assertEqual(attrs.getValue((ns_uri, "attr")), "val")
        self.assertEqual(attrs.getValueByQName("ns:attr"), "val")
        self.assertEqual(attrs.getNameByQName("ns:attr"), (ns_uri, "attr"))
        self.assertEqual(attrs[(ns_uri, "attr")], "val")
        self.assertEqual(attrs.getQNameByName((ns_uri, "attr")), "ns:attr")

    # During the development of Python 2.5, an attempt to move the "xml"
    # package implementation to a new package ("xmlcore") proved painful.
    # The goal of this change was to allow applications to obtain and rely
    # on behavior in the standard library implementation of the XML support
    # without needing to be concerned about the availability of the PyXML
    # implementation.
    #
    # While the existing import hackery in Lib/xml/__init__.py can cause
    # PyXML's _xmlpus package to supplant the "xml" package, that only
    # works because both implementations use the "xml" package name for
    # imports.
    #
    # The move resulted in a number of problems related to the fact that
    # the import machinery's "package context" is based on the name that's
    # being imported rather than the __name__ of the actual package
    # containment; it wasn't possible for the "xml" package to be replaced
    # by a simple module that indirected imports to the "xmlcore" package.
    #
    # The following two tests exercised bugs that were introduced in that
    # attempt.  Keeping these tests around will help detect problems with
    # other attempts to provide reliable access to the standard library's
    # implementation of the XML support.

    def test_sf_1511497(self):
        # Bug report: http://www.python.org/sf/1511497
        import sys
        old_modules = sys.modules.copy()
        for modname in sys.modules.keys():
            if modname.startswith("xml."):
                del sys.modules[modname]
        try:
            import xml.sax.expatreader
            module = xml.sax.expatreader
            self.assertEqual(module.__name__, "xml.sax.expatreader")
        finally:
            sys.modules.update(old_modules)

    def test_sf_1513611(self):
        # Bug report: http://www.python.org/sf/1513611
        sio = StringIO("invalid")
        parser = make_parser()
        from xml.sax import SAXParseException
        self.assertRaises(SAXParseException, parser.parse, sio)

def test_main():
    run_unittest(MakeParserTest,
                 ParseTest,
                 SaxutilsTest,
                 PrepareInputSourceTest,
                 StringXmlgenTest,
                 BytesIOXmlgenTest,
                 WriterXmlgenTest,
                 ExpatReaderTest,
                 ErrorReportingTest,
                 XmlReaderTest)

if __name__ == "__main__":
    test_main()