add a filterfunc to zip file.PyZipFile.writepy, issue 19274
parent 47f1b762fd
commit 59202e5fc7

Doc/library/zipfile.rst

@@ -382,7 +382,10 @@ The :class:`PyZipFile` constructor takes the same parameters as the

Instances have one method in addition to those of :class:`ZipFile` objects:

-.. method:: PyZipFile.writepy(pathname, basename='')
+.. method:: PyZipFile.writepy(pathname, basename='', filterfunc=None)
+
+   .. versionadded:: 3.4
+      The *filterfunc* parameter.

   Search for files :file:`\*.py` and add the corresponding file to the
   archive.

@@ -404,7 +407,10 @@ The :class:`PyZipFile` constructor takes the same parameters as the

   package directory, then all :file:`\*.py[co]` are added under the package
   name as a file path, and if any subdirectories are package directories,
   all of these are added recursively. *basename* is intended for internal
-  use only. The :meth:`writepy` method makes archives with file names like
+  use only. When *filterfunc(pathname)* is given, it will be called for every
+  path encountered. When it returns a false value, that path and its subpaths
+  will be ignored.
+  The :meth:`writepy` method makes archives with file names like
   this::

      string.pyc                     # Top level name

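A minimal usage sketch of the behaviour documented above (not part of this
commit; the archive name "mypkg.zip", the package path "mypkg" and the helper
skip_tests are made-up names):

    import zipfile

    def skip_tests(path):
        # Returning a false value tells writepy() to ignore this path
        # and everything below it.
        return 'test' not in path

    with zipfile.PyZipFile('mypkg.zip', mode='w') as zf:
        zf.writepy('mypkg', filterfunc=skip_tests)
        print(zf.namelist())
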
Doc/whatsnew/3.4.rst

@@ -564,6 +564,16 @@ Add an event-driven parser for non-blocking applications,

(Contributed by Antoine Pitrou in :issue:`17741`.)

+
+zipfile.PyZipFile
+-----------------
+
+Add a filter function to :meth:`~zipfile.PyZipFile.writepy` that can be used
+to ignore some packages (tests, for instance).
+
+(Contributed by Christian Tismer in :issue:`19274`.)
+

Other improvements
==================

Lib/test/test_zipfile.py

@@ -591,6 +591,28 @@ class PyZipFileTests(unittest.TestCase):

            self.assertCompiledIn('email/__init__.py', names)
            self.assertCompiledIn('email/mime/text.py', names)

+    def test_write_filtered_python_package(self):
+        import test
+        packagedir = os.path.dirname(test.__file__)
+
+        with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
+
+            stdout = sys.stdout
+
+            # first make sure that the test folder gives error messages
+            sys.stdout = reportSIO = io.StringIO()
+            zipfp.writepy(packagedir)
+            reportStr = reportSIO.getvalue()
+            self.assertTrue('SyntaxError' in reportStr)
+
+            # then check that the filter works
+            sys.stdout = reportSIO = io.StringIO()
+            zipfp.writepy(packagedir, filterfunc=lambda whatever: False)
+            reportStr = reportSIO.getvalue()
+            self.assertTrue('SyntaxError' not in reportStr)
+
+            sys.stdout = stdout
+
    def test_write_with_optimization(self):
        import email
        packagedir = os.path.dirname(email.__file__)

@@ -600,7 +622,7 @@ class PyZipFileTests(unittest.TestCase):

        ext = '.pyo' if optlevel == 1 else '.pyc'

        with TemporaryFile() as t, \
             zipfile.PyZipFile(t, "w", optimize=optlevel) as zipfp:
            zipfp.writepy(packagedir)

            names = zipfp.namelist()

@@ -733,25 +755,25 @@ class ExtractTests(unittest.TestCase):

    def test_extract_hackers_arcnames_windows_only(self):
        """Test combination of path fixing and windows name sanitization."""
        windows_hacknames = [
            (r'..\foo\bar', 'foo/bar'),
            (r'..\/foo\/bar', 'foo/bar'),
            (r'foo/\..\/bar', 'foo/bar'),
            (r'foo\/../\bar', 'foo/bar'),
            (r'C:foo/bar', 'foo/bar'),
            (r'C:/foo/bar', 'foo/bar'),
            (r'C://foo/bar', 'foo/bar'),
            (r'C:\foo\bar', 'foo/bar'),
            (r'//conky/mountpoint/foo/bar', 'foo/bar'),
            (r'\\conky\mountpoint\foo\bar', 'foo/bar'),
            (r'///conky/mountpoint/foo/bar', 'conky/mountpoint/foo/bar'),
            (r'\\\conky\mountpoint\foo\bar', 'conky/mountpoint/foo/bar'),
            (r'//conky//mountpoint/foo/bar', 'conky/mountpoint/foo/bar'),
            (r'\\conky\\mountpoint\foo\bar', 'conky/mountpoint/foo/bar'),
            (r'//?/C:/foo/bar', 'foo/bar'),
            (r'\\?\C:\foo\bar', 'foo/bar'),
            (r'C:/../C:/foo/bar', 'C_/foo/bar'),
            (r'a:b\c<d>e|f"g?h*i', 'b/c_d_e_f_g_h_i'),
            ('../../foo../../ba..r', 'foo/ba..r'),
        ]
        self._test_extract_hackers_arcnames(windows_hacknames)

@@ -877,10 +899,10 @@ class OtherTests(unittest.TestCase):

    def test_unsupported_version(self):
        # File has an extract_version of 120
        data = (b'PK\x03\x04x\x00\x00\x00\x00\x00!p\xa1@\x00\x00\x00\x00\x00\x00'
                b'\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00xPK\x01\x02x\x03x\x00\x00\x00\x00'
                b'\x00!p\xa1@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00'
                b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x01\x00\x00\x00\x00xPK\x05\x06'
                b'\x00\x00\x00\x00\x01\x00\x01\x00/\x00\x00\x00\x1f\x00\x00\x00\x00\x00')

        self.assertRaises(NotImplementedError, zipfile.ZipFile,
                          io.BytesIO(data), 'r')

@@ -1066,11 +1088,11 @@ class OtherTests(unittest.TestCase):

    def test_unsupported_compression(self):
        # data is declared as shrunk, but actually deflated
        data = (b'PK\x03\x04.\x00\x00\x00\x01\x00\xe4C\xa1@\x00\x00\x00'
                b'\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00x\x03\x00PK\x01'
                b'\x02.\x03.\x00\x00\x00\x01\x00\xe4C\xa1@\x00\x00\x00\x00\x02\x00\x00'
                b'\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
                b'\x80\x01\x00\x00\x00\x00xPK\x05\x06\x00\x00\x00\x00\x01\x00\x01\x00'
                b'/\x00\x00\x00!\x00\x00\x00\x00\x00')

        with zipfile.ZipFile(io.BytesIO(data), 'r') as zipf:
            self.assertRaises(NotImplementedError, zipf.open, 'x')

@@ -1232,57 +1254,57 @@ class AbstractBadCrcTests:

class StoredBadCrcTests(AbstractBadCrcTests, unittest.TestCase):
    compression = zipfile.ZIP_STORED
    zip_with_bad_crc = (
        b'PK\003\004\024\0\0\0\0\0 \213\212;:r'
        b'\253\377\f\0\0\0\f\0\0\0\005\0\0\000af'
        b'ilehello,AworldP'
        b'K\001\002\024\003\024\0\0\0\0\0 \213\212;:'
        b'r\253\377\f\0\0\0\f\0\0\0\005\0\0\0\0'
        b'\0\0\0\0\0\0\0\200\001\0\0\0\000afi'
        b'lePK\005\006\0\0\0\0\001\0\001\0003\000'
        b'\0\0/\0\0\0\0\0')

@requires_zlib
class DeflateBadCrcTests(AbstractBadCrcTests, unittest.TestCase):
    compression = zipfile.ZIP_DEFLATED
    zip_with_bad_crc = (
        b'PK\x03\x04\x14\x00\x00\x00\x08\x00n}\x0c=FA'
        b'KE\x10\x00\x00\x00n\x00\x00\x00\x05\x00\x00\x00af'
        b'ile\xcbH\xcd\xc9\xc9W(\xcf/\xcaI\xc9\xa0'
        b'=\x13\x00PK\x01\x02\x14\x03\x14\x00\x00\x00\x08\x00n'
        b'}\x0c=FAKE\x10\x00\x00\x00n\x00\x00\x00\x05'
        b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x01\x00\x00\x00'
        b'\x00afilePK\x05\x06\x00\x00\x00\x00\x01\x00'
        b'\x01\x003\x00\x00\x003\x00\x00\x00\x00\x00')

@requires_bz2
class Bzip2BadCrcTests(AbstractBadCrcTests, unittest.TestCase):
    compression = zipfile.ZIP_BZIP2
    zip_with_bad_crc = (
        b'PK\x03\x04\x14\x03\x00\x00\x0c\x00nu\x0c=FA'
        b'KE8\x00\x00\x00n\x00\x00\x00\x05\x00\x00\x00af'
        b'ileBZh91AY&SY\xd4\xa8\xca'
        b'\x7f\x00\x00\x0f\x11\x80@\x00\x06D\x90\x80 \x00 \xa5'
        b'P\xd9!\x03\x03\x13\x13\x13\x89\xa9\xa9\xc2u5:\x9f'
        b'\x8b\xb9"\x9c(HjTe?\x80PK\x01\x02\x14'
        b'\x03\x14\x03\x00\x00\x0c\x00nu\x0c=FAKE8'
        b'\x00\x00\x00n\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00'
        b'\x00 \x80\x80\x81\x00\x00\x00\x00afilePK'
        b'\x05\x06\x00\x00\x00\x00\x01\x00\x01\x003\x00\x00\x00[\x00'
        b'\x00\x00\x00\x00')

@requires_lzma
class LzmaBadCrcTests(AbstractBadCrcTests, unittest.TestCase):
    compression = zipfile.ZIP_LZMA
    zip_with_bad_crc = (
        b'PK\x03\x04\x14\x03\x00\x00\x0e\x00nu\x0c=FA'
        b'KE\x1b\x00\x00\x00n\x00\x00\x00\x05\x00\x00\x00af'
        b'ile\t\x04\x05\x00]\x00\x00\x00\x04\x004\x19I'
        b'\xee\x8d\xe9\x17\x89:3`\tq!.8\x00PK'
        b'\x01\x02\x14\x03\x14\x03\x00\x00\x0e\x00nu\x0c=FA'
        b'KE\x1b\x00\x00\x00n\x00\x00\x00\x05\x00\x00\x00\x00\x00'
        b'\x00\x00\x00\x00 \x80\x80\x81\x00\x00\x00\x00afil'
        b'ePK\x05\x06\x00\x00\x00\x00\x01\x00\x01\x003\x00\x00'
        b'\x00>\x00\x00\x00\x00\x00')


class DecryptionTests(unittest.TestCase):

@@ -1291,22 +1313,22 @@ class DecryptionTests(unittest.TestCase):

    ZIP file."""

    data = (
        b'PK\x03\x04\x14\x00\x01\x00\x00\x00n\x92i.#y\xef?&\x00\x00\x00\x1a\x00'
        b'\x00\x00\x08\x00\x00\x00test.txt\xfa\x10\xa0gly|\xfa-\xc5\xc0=\xf9y'
        b'\x18\xe0\xa8r\xb3Z}Lg\xbc\xae\xf9|\x9b\x19\xe4\x8b\xba\xbb)\x8c\xb0\xdbl'
        b'PK\x01\x02\x14\x00\x14\x00\x01\x00\x00\x00n\x92i.#y\xef?&\x00\x00\x00'
        b'\x1a\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x01\x00 \x00\xb6\x81'
        b'\x00\x00\x00\x00test.txtPK\x05\x06\x00\x00\x00\x00\x01\x00\x01\x006\x00'
        b'\x00\x00L\x00\x00\x00\x00\x00' )
    data2 = (
        b'PK\x03\x04\x14\x00\t\x00\x08\x00\xcf}38xu\xaa\xb2\x14\x00\x00\x00\x00\x02'
        b'\x00\x00\x04\x00\x15\x00zeroUT\t\x00\x03\xd6\x8b\x92G\xda\x8b\x92GUx\x04'
        b'\x00\xe8\x03\xe8\x03\xc7<M\xb5a\xceX\xa3Y&\x8b{oE\xd7\x9d\x8c\x98\x02\xc0'
        b'PK\x07\x08xu\xaa\xb2\x14\x00\x00\x00\x00\x02\x00\x00PK\x01\x02\x17\x03'
        b'\x14\x00\t\x00\x08\x00\xcf}38xu\xaa\xb2\x14\x00\x00\x00\x00\x02\x00\x00'
        b'\x04\x00\r\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81\x00\x00\x00\x00ze'
        b'roUT\x05\x00\x03\xd6\x8b\x92GUx\x00\x00PK\x05\x06\x00\x00\x00\x00\x01'
        b'\x00\x01\x00?\x00\x00\x00[\x00\x00\x00\x00\x00' )

    plain = b'zipfile.py encryption test'
    plain2 = b'\x00'*512

Lib/zipfile.py

@@ -211,8 +211,8 @@ def _EndRecData64(fpin, offset, endrec):

    if len(data) != sizeEndCentDir64:
        return endrec
    sig, sz, create_version, read_version, disk_num, disk_dir, \
        dircount, dircount2, dirsize, diroffset = \
        struct.unpack(structEndArchive64, data)
    if sig != stringEndArchive64:
        return endrec

@@ -292,26 +292,26 @@ class ZipInfo (object):

    """Class with attributes describing each file in the ZIP archive."""

    __slots__ = (
        'orig_filename',
        'filename',
        'date_time',
        'compress_type',
        'comment',
        'extra',
        'create_system',
        'create_version',
        'extract_version',
        'reserved',
        'flag_bits',
        'volume',
        'internal_attr',
        'external_attr',
        'header_offset',
        'CRC',
        'compress_size',
        'file_size',
        '_raw_time',
    )

    def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
        self.orig_filename = filename   # Original file name in archive

@@ -376,7 +376,7 @@ class ZipInfo (object):

        if zip64:
            fmt = '<HHQQ'
            extra = extra + struct.pack(fmt,
                1, struct.calcsize(fmt)-4, file_size, compress_size)
        if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
            if not zip64:
                raise LargeZipFile("Filesize would require ZIP64 extensions")

@@ -395,10 +395,10 @@ class ZipInfo (object):

        self.create_version = max(min_version, self.create_version)
        filename, flag_bits = self._encodeFilenameFlags()
        header = struct.pack(structFileHeader, stringFileHeader,
                             self.extract_version, self.reserved, flag_bits,
                             self.compress_type, dostime, dosdate, CRC,
                             compress_size, file_size,
                             len(filename), len(extra))
        return header + filename + extra

    def _encodeFilenameFlags(self):

@@ -511,7 +511,7 @@ class LZMACompressor:

    def _init(self):
        props = lzma._encode_filter_properties({'id': lzma.FILTER_LZMA1})
        self._comp = lzma.LZMACompressor(lzma.FORMAT_RAW, filters=[
            lzma._decode_filter_properties(lzma.FILTER_LZMA1, props)
        ])
        return struct.pack('<BBH', 9, 4, len(props)) + props

@@ -543,8 +543,8 @@ class LZMADecompressor:

            return b''

        self._decomp = lzma.LZMADecompressor(lzma.FORMAT_RAW, filters=[
            lzma._decode_filter_properties(lzma.FILTER_LZMA1,
                                           self._unconsumed[4:4 + psize])
        ])
        data = self._unconsumed[4 + psize:]
        del self._unconsumed

@@ -580,15 +580,15 @@ def _check_compression(compression):

    elif compression == ZIP_DEFLATED:
        if not zlib:
            raise RuntimeError(
                "Compression requires the (missing) zlib module")
    elif compression == ZIP_BZIP2:
        if not bz2:
            raise RuntimeError(
                "Compression requires the (missing) bz2 module")
    elif compression == ZIP_LZMA:
        if not lzma:
            raise RuntimeError(
                "Compression requires the (missing) lzma module")
    else:
        raise RuntimeError("That compression method is not supported")

@@ -596,7 +596,7 @@ def _check_compression(compression):

def _get_compressor(compress_type):
    if compress_type == ZIP_DEFLATED:
        return zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                                zlib.DEFLATED, -15)
    elif compress_type == ZIP_BZIP2:
        return bz2.BZ2Compressor()
    elif compress_type == ZIP_LZMA:

@@ -836,8 +836,8 @@ class ZipExtFile(io.BufferedIOBase):

            n = max(n, self.MIN_READ_SIZE)
            data = self._decompressor.decompress(data, n)
            self._eof = (self._decompressor.eof or
                         self._compress_left <= 0 and
                         not self._decompressor.unconsumed_tail)
            if self._eof:
                data += self._decompressor.flush()
        else:

@@ -1016,8 +1016,8 @@ class ZipFile:

            x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
            x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
            (x.create_version, x.create_system, x.extract_version, x.reserved,
             x.flag_bits, x.compress_type, t, d,
             x.CRC, x.compress_size, x.file_size) = centdir[1:12]
            if x.extract_version > MAX_EXTRACT_VERSION:
                raise NotImplementedError("zip file version %.1f" %
                                          (x.extract_version / 10))

@@ -1025,7 +1025,7 @@ class ZipFile:

            # Convert date/time code to (year, month, day, hour, min, sec)
            x._raw_time = t
            x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
                            t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )

            x._decodeExtra()
            x.header_offset = x.header_offset + concat

@@ -1103,7 +1103,7 @@ class ZipFile:

        if len(comment) >= ZIP_MAX_COMMENT:
            if self.debug:
                print('Archive comment is too long; truncating to %d bytes'
                      % ZIP_MAX_COMMENT)
            comment = comment[:ZIP_MAX_COMMENT]
        self._comment = comment
        self._didModify = True

@@ -1121,7 +1121,7 @@ class ZipFile:

            raise TypeError("pwd: expected bytes, got %s" % type(pwd))
        if not self.fp:
            raise RuntimeError(
                "Attempt to read ZIP archive that was already closed")

        # Only open a new file for instances where we were not
        # given a file object in the constructor

@@ -1294,7 +1294,7 @@ class ZipFile:

            raise RuntimeError('write() requires mode "w" or "a"')
        if not self.fp:
            raise RuntimeError(
                "Attempt to write ZIP archive that was already closed")
        _check_compression(zinfo.compress_type)
        if zinfo.file_size > ZIP64_LIMIT:
            if not self._allowZip64:

@@ -1302,14 +1302,14 @@ class ZipFile:

        if zinfo.header_offset > ZIP64_LIMIT:
            if not self._allowZip64:
                raise LargeZipFile(
                    "Zipfile size would require ZIP64 extensions")

    def write(self, filename, arcname=None, compress_type=None):
        """Put the bytes from filename into the archive under the name
        arcname."""
        if not self.fp:
            raise RuntimeError(
                "Attempt to write to ZIP archive that was already closed")

        st = os.stat(filename)
        isdir = stat.S_ISDIR(st.st_mode)

@@ -1356,7 +1356,7 @@ class ZipFile:

            zinfo.compress_size = compress_size = 0
            # Compressed size can be larger than uncompressed size
            zip64 = self._allowZip64 and \
                    zinfo.file_size * 1.05 > ZIP64_LIMIT
            self.fp.write(zinfo.FileHeader(zip64))
            file_size = 0
            while 1:

@@ -1410,7 +1410,7 @@ class ZipFile:

        if not self.fp:
            raise RuntimeError(
                "Attempt to write to ZIP archive that was already closed")

        zinfo.file_size = len(data)             # Uncompressed size
        zinfo.header_offset = self.fp.tell()    # Start of header data

@@ -1430,7 +1430,7 @@ class ZipFile:

        else:
            zinfo.compress_size = zinfo.file_size
        zip64 = zinfo.file_size > ZIP64_LIMIT or \
                zinfo.compress_size > ZIP64_LIMIT
        if zip64 and not self._allowZip64:
            raise LargeZipFile("Filesize would require ZIP64 extensions")
        self.fp.write(zinfo.FileHeader(zip64))

@@ -1439,7 +1439,7 @@ class ZipFile:

        # Write CRC and file sizes after the file data
        fmt = '<LQQ' if zip64 else '<LLL'
        self.fp.write(struct.pack(fmt, zinfo.CRC, zinfo.compress_size,
                                  zinfo.file_size))
        self.fp.flush()
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo

@@ -1465,7 +1465,7 @@ class ZipFile:

            dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
            extra = []
            if zinfo.file_size > ZIP64_LIMIT \
               or zinfo.compress_size > ZIP64_LIMIT:
                extra.append(zinfo.file_size)
                extra.append(zinfo.compress_size)
                file_size = 0xffffffff

@@ -1485,8 +1485,8 @@ class ZipFile:

            if extra:
                # Append a ZIP64 field to the extra's
                extra_data = struct.pack(
                    '<HH' + 'Q'*len(extra),
                    1, 8*len(extra), *extra) + extra_data

                min_version = ZIP64_VERSION

@@ -1500,21 +1500,21 @@ class ZipFile:

            try:
                filename, flag_bits = zinfo._encodeFilenameFlags()
                centdir = struct.pack(structCentralDir,
                                      stringCentralDir, create_version,
                                      zinfo.create_system, extract_version, zinfo.reserved,
                                      flag_bits, zinfo.compress_type, dostime, dosdate,
                                      zinfo.CRC, compress_size, file_size,
                                      len(filename), len(extra_data), len(zinfo.comment),
                                      0, zinfo.internal_attr, zinfo.external_attr,
                                      header_offset)
            except DeprecationWarning:
                print((structCentralDir, stringCentralDir, create_version,
                       zinfo.create_system, extract_version, zinfo.reserved,
                       zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
                       zinfo.CRC, compress_size, file_size,
                       len(zinfo.filename), len(extra_data), len(zinfo.comment),
                       0, zinfo.internal_attr, zinfo.external_attr,
                       header_offset), file=sys.stderr)
                raise
            self.fp.write(centdir)
            self.fp.write(filename)

@@ -1531,22 +1531,22 @@ class ZipFile:

                centDirSize > ZIP64_LIMIT):
            # Need to write the ZIP64 end-of-archive records
            zip64endrec = struct.pack(
                structEndArchive64, stringEndArchive64,
                44, 45, 45, 0, 0, centDirCount, centDirCount,
                centDirSize, centDirOffset)
            self.fp.write(zip64endrec)

            zip64locrec = struct.pack(
                structEndArchive64Locator,
                stringEndArchive64Locator, 0, pos2, 1)
            self.fp.write(zip64locrec)
            centDirCount = min(centDirCount, 0xFFFF)
            centDirSize = min(centDirSize, 0xFFFFFFFF)
            centDirOffset = min(centDirOffset, 0xFFFFFFFF)

        endrec = struct.pack(structEndArchive, stringEndArchive,
                             0, 0, centDirCount, centDirCount,
                             centDirSize, centDirOffset, len(self._comment))
        self.fp.write(endrec)
        self.fp.write(self._comment)
        self.fp.flush()

@@ -1566,7 +1566,7 @@ class PyZipFile(ZipFile):

                         allowZip64=allowZip64)
        self._optimize = optimize

-    def writepy(self, pathname, basename=""):
+    def writepy(self, pathname, basename="", filterfunc=None):
        """Add all files from "pathname" to the ZIP archive.

        If pathname is a package directory, search the directory and

@@ -1577,7 +1577,13 @@ class PyZipFile(ZipFile):

        archive. Added modules are always module.pyo or module.pyc.
        This method will compile the module.py into module.pyc if
        necessary.
+        If filterfunc(pathname) is given, it is called with every pathname.
+        When it returns a false value, the file or directory is skipped.
        """
+        if filterfunc and not filterfunc(pathname):
+            if self.debug:
+                print('pathname "%s" skipped by filterfunc' % pathname)
+            return
        dir, name = os.path.split(pathname)
        if os.path.isdir(pathname):
            initname = os.path.join(pathname, "__init__.py")

@@ -1602,10 +1608,11 @@ class PyZipFile(ZipFile):

                    if os.path.isdir(path):
                        if os.path.isfile(os.path.join(path, "__init__.py")):
                            # This is a package directory, add it
-                            self.writepy(path, basename)  # Recursive call
+                            self.writepy(path, basename,
+                                         filterfunc=filterfunc)  # Recursive call
                    elif ext == ".py":
                        fname, arcname = self._get_codename(path[0:-3],
                                                            basename)
                        if self.debug:
                            print("Adding", arcname)
                        self.write(fname, arcname)

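A hedged illustration of the code path added above (the project directory
"proj" and the helper keep are made-up names; ZipFile.debug is a pre-existing
attribute, 0 by default, that controls the skip message printed here):

    import os
    import zipfile

    def keep(path):
        # Reject any file or directory whose basename starts with "test";
        # writepy() then returns early for that path, so its subpaths are
        # never visited by the recursive call.
        return not os.path.basename(path).startswith('test')

    with zipfile.PyZipFile('proj.zip', 'w') as zf:
        zf.debug = 1   # makes the 'skipped by filterfunc' message visible
        zf.writepy('proj', filterfunc=keep)
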
@ -1618,14 +1625,14 @@ class PyZipFile(ZipFile):
|
|||
root, ext = os.path.splitext(filename)
|
||||
if ext == ".py":
|
||||
fname, arcname = self._get_codename(path[0:-3],
|
||||
basename)
|
||||
basename)
|
||||
if self.debug:
|
||||
print("Adding", arcname)
|
||||
self.write(fname, arcname)
|
||||
else:
|
||||
if pathname[-3:] != ".py":
|
||||
raise RuntimeError(
|
||||
'Files added with writepy() must end with ".py"')
|
||||
'Files added with writepy() must end with ".py"')
|
||||
fname, arcname = self._get_codename(pathname[0:-3], basename)
|
||||
if self.debug:
|
||||
print("Adding file", arcname)
|
||||
|
@ -1764,7 +1771,7 @@ def main(args = None):
|
|||
elif os.path.isdir(path):
|
||||
for nm in os.listdir(path):
|
||||
addToZip(zf,
|
||||
os.path.join(path, nm), os.path.join(zippath, nm))
|
||||
os.path.join(path, nm), os.path.join(zippath, nm))
|
||||
# else: ignore
|
||||
|
||||
with ZipFile(args[1], 'w', allowZip64=True) as zf:
|
||||
|
|