# -*- coding: utf-8 -*-
"""Tests for distutils.archive_util."""
import unittest
import os
import sys
import tarfile
from os.path import splitdrive
import warnings

from distutils import archive_util
from distutils.archive_util import (check_archive_formats, make_tarball,
                                    make_zipfile, make_archive,
                                    ARCHIVE_FORMATS)
from distutils.spawn import find_executable, spawn
from distutils.tests import support
from test.support import check_warnings, run_unittest, patch
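
# These helpers are normally driven through make_archive(); an illustrative
# call (the paths here are made up and not used by the tests) would be
#
#     make_archive('/tmp/dist-1.0', 'gztar', root_dir='some/build/dir')
#
# which should produce /tmp/dist-1.0.tar.gz.  The tests below also exercise
# the lower-level make_tarball()/make_zipfile() helpers directly.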

try:
    import zipfile
    ZIP_SUPPORT = True
except ImportError:
    ZIP_SUPPORT = find_executable('zip')

try:
    import zlib
    ZLIB_SUPPORT = True
except ImportError:
    ZLIB_SUPPORT = False
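
# Even without the zipfile module, make_zipfile() can fall back to an external
# 'zip' executable, so ZIP_SUPPORT may hold the path of that program rather
# than True.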


def can_fs_encode(filename):
    """
    Return True if the filename can be saved in the file system.
    """
    if os.path.supports_unicode_filenames:
        return True
    try:
        filename.encode(sys.getfilesystemencoding())
    except UnicodeEncodeError:
        return False
    return True


class ArchiveUtilTestCase(support.TempdirManager,
                          support.LoggingSilencer,
                          unittest.TestCase):

    @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
    def test_make_tarball(self):
        self._make_tarball('archive')

    @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
    @unittest.skipUnless(can_fs_encode('årchiv'),
                         'File system cannot handle this filename')
    def test_make_tarball_latin1(self):
        """
        Mirror test_make_tarball, except filename contains latin characters.
        """
        self._make_tarball('årchiv')  # note this isn't a real word

    @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
    @unittest.skipUnless(can_fs_encode('のアーカイブ'),
                         'File system cannot handle this filename')
    def test_make_tarball_extended(self):
        """
        Mirror test_make_tarball, except filename contains extended
        characters outside the latin charset.
        """
        self._make_tarball('のアーカイブ')  # Japanese for archive

    def _make_tarball(self, target_name):
        # creating something to tar
        tmpdir = self.mkdtemp()
        self.write_file([tmpdir, 'file1'], 'xxx')
        self.write_file([tmpdir, 'file2'], 'xxx')
        os.mkdir(os.path.join(tmpdir, 'sub'))
        self.write_file([tmpdir, 'sub', 'file3'], 'xxx')

        tmpdir2 = self.mkdtemp()
        # the relative-path trick below only works if both temporary
        # directories live on the same drive
        if splitdrive(tmpdir)[0] != splitdrive(tmpdir2)[0]:
            raise unittest.SkipTest("source and target should be on same drive")

        base_name = os.path.join(tmpdir2, target_name)
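
        # splitdrive() strips the drive letter from the output path so that,
        # after the chdir() into tmpdir below, the archive is still written on
        # the current drive; the contents are added through the relative '.'
        # path to keep tar from seeing absolute member names.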

        # working with relative paths to avoid tar warnings
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            make_tarball(splitdrive(base_name)[1], '.')
        finally:
            os.chdir(old_dir)

        # check if the compressed tarball was created
        tarball = base_name + '.tar.gz'
        self.assertTrue(os.path.exists(tarball))

        # trying an uncompressed one
        base_name = os.path.join(tmpdir2, target_name)
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            make_tarball(splitdrive(base_name)[1], '.', compress=None)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        self.assertTrue(os.path.exists(tarball))

    def _tarinfo(self, path):
        tar = tarfile.open(path)
        try:
            names = tar.getnames()
            names.sort()
            return tuple(names)
        finally:
            tar.close()

    def _create_files(self):
        # creating something to tar
        tmpdir = self.mkdtemp()
        dist = os.path.join(tmpdir, 'dist')
        os.mkdir(dist)
        self.write_file([dist, 'file1'], 'xxx')
        self.write_file([dist, 'file2'], 'xxx')
        os.mkdir(os.path.join(dist, 'sub'))
        self.write_file([dist, 'sub', 'file3'], 'xxx')
        os.mkdir(os.path.join(dist, 'sub2'))
        tmpdir2 = self.mkdtemp()
        base_name = os.path.join(tmpdir2, 'archive')
        return tmpdir, tmpdir2, base_name
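
    # _create_files() leaves this layout behind for the tests:
    #
    #   tmpdir/dist/file1
    #   tmpdir/dist/file2
    #   tmpdir/dist/sub/file3
    #   tmpdir/dist/sub2/        (empty directory)
    #
    # and returns a second temporary directory plus the 'archive' base name
    # inside it under which the archives are written.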

    @unittest.skipUnless(find_executable('tar') and find_executable('gzip')
                         and ZLIB_SUPPORT,
                         'Need the tar and gzip commands and zlib support to run')
    def test_tarfile_vs_tar(self):
        tmpdir, tmpdir2, base_name = self._create_files()
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            make_tarball(base_name, 'dist')
        finally:
            os.chdir(old_dir)

        # check if the compressed tarball was created
        tarball = base_name + '.tar.gz'
        self.assertTrue(os.path.exists(tarball))

        # now create another tarball using `tar`
        tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
        tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
        gzip_cmd = ['gzip', '-f9', 'archive2.tar']
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            spawn(tar_cmd)
            spawn(gzip_cmd)
        finally:
            os.chdir(old_dir)

        self.assertTrue(os.path.exists(tarball2))
        # let's compare both tarballs
        self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2))

        # trying an uncompressed one
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            make_tarball(base_name, 'dist', compress=None)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        self.assertTrue(os.path.exists(tarball))
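
        # The dry_run pass below should neither fail nor touch the filesystem;
        # the .tar file checked afterwards is the one left behind by the
        # uncompressed run above.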

        # now for a dry_run
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            make_tarball(base_name, 'dist', compress=None, dry_run=True)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        self.assertTrue(os.path.exists(tarball))

    @unittest.skipUnless(find_executable('compress'),
                         'The compress program is required')
    def test_compress_deprecated(self):
        tmpdir, tmpdir2, base_name = self._create_files()

        # using compress and testing the PendingDeprecationWarning
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            with check_warnings() as w:
                warnings.simplefilter("always")
                make_tarball(base_name, 'dist', compress='compress')
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar.Z'
        self.assertTrue(os.path.exists(tarball))
        self.assertEqual(len(w.warnings), 1)

        # same test with dry_run
        os.remove(tarball)
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            with check_warnings() as w:
                warnings.simplefilter("always")
                make_tarball(base_name, 'dist', compress='compress',
                             dry_run=True)
        finally:
            os.chdir(old_dir)
        self.assertTrue(not os.path.exists(tarball))
        self.assertEqual(len(w.warnings), 1)

    @unittest.skipUnless(ZIP_SUPPORT and ZLIB_SUPPORT,
                         'Need zip and zlib support to run')
    def test_make_zipfile(self):
        # creating something to zip up
        tmpdir = self.mkdtemp()
        self.write_file([tmpdir, 'file1'], 'xxx')
        self.write_file([tmpdir, 'file2'], 'xxx')

        tmpdir2 = self.mkdtemp()
        base_name = os.path.join(tmpdir2, 'archive')
        make_zipfile(base_name, tmpdir)

        # check if the zip file was created
        tarball = base_name + '.zip'
        self.assertTrue(os.path.exists(tarball))

    @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
    def test_make_zipfile_no_zlib(self):
        patch(self, archive_util.zipfile, 'zlib', None)  # force zlib ImportError

        called = []
        zipfile_class = zipfile.ZipFile
        def fake_zipfile(*a, **kw):
            if kw.get('compression', None) == zipfile.ZIP_STORED:
                called.append((a, kw))
            return zipfile_class(*a, **kw)

        patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile)
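
        # With zlib patched out, make_zipfile() is expected to fall back to an
        # uncompressed archive; the fake ZipFile above records any call that
        # asks for ZIP_STORED so the fallback can be asserted.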

        # create something to archive
        tmpdir, tmpdir2, base_name = self._create_files()
        make_zipfile(base_name, tmpdir)

        tarball = base_name + '.zip'
        self.assertEqual(called,
                         [((tarball, "w"), {'compression': zipfile.ZIP_STORED})])
        self.assertTrue(os.path.exists(tarball))
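
    # check_archive_formats() returns the first requested format it does not
    # know about, or None when every format is recognized.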

    def test_check_archive_formats(self):
        self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']),
                         'xxx')
        self.assertEqual(check_archive_formats(['gztar', 'zip']), None)

    def test_make_archive(self):
        tmpdir = self.mkdtemp()
        base_name = os.path.join(tmpdir, 'archive')
        self.assertRaises(ValueError, make_archive, base_name, 'xxx')
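
    # make_archive() changes into root_dir before invoking the archiver and is
    # supposed to change back afterwards; the test below registers a fake
    # format that always raises to verify the original working directory is
    # restored even when the archiver fails.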

    def test_make_archive_cwd(self):
        current_dir = os.getcwd()
        def _breaks(*args, **kw):
            raise RuntimeError()
        ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file')
        try:
            try:
                make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
            except:
                pass
            self.assertEqual(os.getcwd(), current_dir)
        finally:
            del ARCHIVE_FORMATS['xxx']


def test_suite():
    return unittest.makeSuite(ArchiveUtilTestCase)

if __name__ == "__main__":
    run_unittest(test_suite())