# -*- coding: utf-8 -*-
"""Tests for distutils.archive_util."""
import unittest
import os
import sys
import tarfile
from os.path import splitdrive
import warnings

from distutils import archive_util
from distutils.archive_util import (check_archive_formats, make_tarball,
                                    make_zipfile, make_archive,
                                    ARCHIVE_FORMATS)
from distutils.spawn import find_executable, spawn
from distutils.tests import support
from test.support import check_warnings, run_unittest, patch, change_cwd

try:
    import grp
    import pwd
    UID_GID_SUPPORT = True
except ImportError:
    UID_GID_SUPPORT = False

try:
    import zipfile
    ZIP_SUPPORT = True
except ImportError:
    ZIP_SUPPORT = find_executable('zip')

try:
    import zlib
    ZLIB_SUPPORT = True
except ImportError:
    ZLIB_SUPPORT = False

try:
    import bz2
except ImportError:
    bz2 = None

try:
    import lzma
except ImportError:
    lzma = None

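# can_fs_encode() is used by the skipUnless() guards below so that the
# non-ASCII filename tests are skipped when the file system cannot
# represent those names.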
def can_fs_encode(filename):
    """
    Return True if the filename can be saved in the file system.
    """
    if os.path.supports_unicode_filenames:
        return True
    try:
        filename.encode(sys.getfilesystemencoding())
    except UnicodeEncodeError:
        return False
    return True


class ArchiveUtilTestCase(support.TempdirManager,
                          support.LoggingSilencer,
                          unittest.TestCase):

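    # The test_make_tarball* tests build an archive from the fixture tree
    # created by _create_files() and delegate the existence and content
    # checks to _make_tarball().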
    @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
    def test_make_tarball(self, name='archive'):
        # creating something to tar
        tmpdir = self._create_files()
        self._make_tarball(tmpdir, name, '.tar.gz')
        # trying an uncompressed one
        self._make_tarball(tmpdir, name, '.tar', compress=None)

    @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
    def test_make_tarball_gzip(self):
        tmpdir = self._create_files()
        self._make_tarball(tmpdir, 'archive', '.tar.gz', compress='gzip')

    @unittest.skipUnless(bz2, 'Need bz2 support to run')
    def test_make_tarball_bzip2(self):
        tmpdir = self._create_files()
        self._make_tarball(tmpdir, 'archive', '.tar.bz2', compress='bzip2')

    @unittest.skipUnless(lzma, 'Need lzma support to run')
    def test_make_tarball_xz(self):
        tmpdir = self._create_files()
        self._make_tarball(tmpdir, 'archive', '.tar.xz', compress='xz')

    @unittest.skipUnless(can_fs_encode('årchiv'),
                         'File system cannot handle this filename')
    def test_make_tarball_latin1(self):
        """
        Mirror test_make_tarball, except filename contains Latin characters.
        """
        self.test_make_tarball('årchiv')  # note this isn't a real word

    @unittest.skipUnless(can_fs_encode('のアーカイブ'),
                         'File system cannot handle this filename')
    def test_make_tarball_extended(self):
        """
        Mirror test_make_tarball, except filename contains extended
        characters outside the Latin charset.
        """
        self.test_make_tarball('のアーカイブ')  # Japanese for 'archive'

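    # helper: builds the archive in a second temporary directory, then checks
    # that the expected file exists and that its members match the fixture
    # tree created by _create_files()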
    def _make_tarball(self, tmpdir, target_name, suffix, **kwargs):
        tmpdir2 = self.mkdtemp()
        # skip (rather than silently do nothing) when the two temporary
        # directories end up on different drives, e.g. on Windows
        if splitdrive(tmpdir)[0] != splitdrive(tmpdir2)[0]:
            raise unittest.SkipTest("source and target should be on same drive")

        base_name = os.path.join(tmpdir2, target_name)

        # working with relative paths to avoid tar warnings
        with change_cwd(tmpdir):
            make_tarball(splitdrive(base_name)[1], 'dist', **kwargs)

        # check if the compressed tarball was created
        tarball = base_name + suffix
        self.assertTrue(os.path.exists(tarball))
        self.assertEqual(self._tarinfo(tarball), self._created_files)

    def _tarinfo(self, path):
        tar = tarfile.open(path)
        try:
            names = tar.getnames()
            names.sort()
            return names
        finally:
            tar.close()

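    # expected archive members: zipfile reports directory entries with a
    # trailing slash, tarfile reports them without one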
    _zip_created_files = ['dist/', 'dist/file1', 'dist/file2',
                          'dist/sub/', 'dist/sub/file3', 'dist/sub2/']
    _created_files = [p.rstrip('/') for p in _zip_created_files]

    def _create_files(self):
        # creating something to tar
        tmpdir = self.mkdtemp()
        dist = os.path.join(tmpdir, 'dist')
        os.mkdir(dist)
        self.write_file([dist, 'file1'], 'xxx')
        self.write_file([dist, 'file2'], 'xxx')
        os.mkdir(os.path.join(dist, 'sub'))
        self.write_file([dist, 'sub', 'file3'], 'xxx')
        os.mkdir(os.path.join(dist, 'sub2'))
        return tmpdir

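    # cross-check the tarfile-based implementation against archives produced
    # by the external `tar` and `gzip` commands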
    @unittest.skipUnless(find_executable('tar') and find_executable('gzip')
                         and ZLIB_SUPPORT,
                         'Need the tar and gzip commands and zlib support to run')
    def test_tarfile_vs_tar(self):
        tmpdir = self._create_files()
        tmpdir2 = self.mkdtemp()
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            make_tarball(base_name, 'dist')
        finally:
            os.chdir(old_dir)

        # check if the compressed tarball was created
        tarball = base_name + '.tar.gz'
        self.assertTrue(os.path.exists(tarball))

        # now create another tarball using `tar`
        tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
        tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
        gzip_cmd = ['gzip', '-f', '-9', 'archive2.tar']
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            spawn(tar_cmd)
            spawn(gzip_cmd)
        finally:
            os.chdir(old_dir)

        self.assertTrue(os.path.exists(tarball2))
        # let's compare both tarballs
        self.assertEqual(self._tarinfo(tarball), self._created_files)
        self.assertEqual(self._tarinfo(tarball2), self._created_files)

        # trying an uncompressed one
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            make_tarball(base_name, 'dist', compress=None)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        self.assertTrue(os.path.exists(tarball))

        # now for a dry_run
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            make_tarball(base_name, 'dist', compress=None, dry_run=True)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        self.assertTrue(os.path.exists(tarball))

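    # the legacy 'compress' format must still produce a .tar.Z file, emit a
    # deprecation warning, and create nothing in dry_run mode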
    @unittest.skipUnless(find_executable('compress'),
                         'The compress program is required')
    def test_compress_deprecated(self):
        tmpdir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')

        # using compress and testing the PendingDeprecationWarning
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            with check_warnings() as w:
                warnings.simplefilter("always")
                make_tarball(base_name, 'dist', compress='compress')
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar.Z'
        self.assertTrue(os.path.exists(tarball))
        self.assertEqual(len(w.warnings), 1)

        # same test with dry_run
        os.remove(tarball)
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            with check_warnings() as w:
                warnings.simplefilter("always")
                make_tarball(base_name, 'dist', compress='compress',
                             dry_run=True)
        finally:
            os.chdir(old_dir)
        self.assertFalse(os.path.exists(tarball))
        self.assertEqual(len(w.warnings), 1)

    @unittest.skipUnless(ZIP_SUPPORT and ZLIB_SUPPORT,
                         'Need zip and zlib support to run')
    def test_make_zipfile(self):
        # creating something to zip
        tmpdir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        with change_cwd(tmpdir):
            make_zipfile(base_name, 'dist')

        # check if the zipfile was created
        tarball = base_name + '.zip'
        self.assertTrue(os.path.exists(tarball))
        with zipfile.ZipFile(tarball) as zf:
            self.assertEqual(sorted(zf.namelist()), self._zip_created_files)

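    # verify that make_zipfile() falls back to a stored (uncompressed) zip
    # when zlib is unavailable, by recording how ZipFile is instantiated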
    @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
    def test_make_zipfile_no_zlib(self):
        patch(self, archive_util.zipfile, 'zlib', None)  # force zlib ImportError

        called = []
        zipfile_class = zipfile.ZipFile
        def fake_zipfile(*a, **kw):
            if kw.get('compression', None) == zipfile.ZIP_STORED:
                called.append((a, kw))
            return zipfile_class(*a, **kw)

        patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile)

        # create something to zip
        tmpdir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        with change_cwd(tmpdir):
            make_zipfile(base_name, 'dist')

        tarball = base_name + '.zip'
        self.assertEqual(called,
                         [((tarball, "w"), {'compression': zipfile.ZIP_STORED})])
        self.assertTrue(os.path.exists(tarball))
        with zipfile.ZipFile(tarball) as zf:
            self.assertEqual(sorted(zf.namelist()), self._zip_created_files)

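    # check_archive_formats() returns the first unknown format name, or None
    # when every requested format is known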
    def test_check_archive_formats(self):
        self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']),
                         'xxx')
        self.assertIsNone(check_archive_formats(['gztar', 'bztar', 'xztar',
                                                 'ztar', 'tar', 'zip']))

    def test_make_archive(self):
        tmpdir = self.mkdtemp()
        base_name = os.path.join(tmpdir, 'archive')
        self.assertRaises(ValueError, make_archive, base_name, 'xxx')

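    # make_archive() must restore the current working directory even when the
    # underlying archiver raises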
    def test_make_archive_cwd(self):
        current_dir = os.getcwd()
        def _breaks(*args, **kw):
            raise RuntimeError()
        ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file')
        try:
            try:
                make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
            except:
                pass
            self.assertEqual(os.getcwd(), current_dir)
        finally:
            del ARCHIVE_FORMATS['xxx']

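    # the per-format tests check both the file name returned by make_archive()
    # and the archive contents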
    def test_make_archive_tar(self):
        base_dir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name, 'tar', base_dir, 'dist')
        self.assertTrue(os.path.exists(res))
        self.assertEqual(os.path.basename(res), 'archive.tar')
        self.assertEqual(self._tarinfo(res), self._created_files)

    @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
    def test_make_archive_gztar(self):
        base_dir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name, 'gztar', base_dir, 'dist')
        self.assertTrue(os.path.exists(res))
        self.assertEqual(os.path.basename(res), 'archive.tar.gz')
        self.assertEqual(self._tarinfo(res), self._created_files)

    @unittest.skipUnless(bz2, 'Need bz2 support to run')
    def test_make_archive_bztar(self):
        base_dir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name, 'bztar', base_dir, 'dist')
        self.assertTrue(os.path.exists(res))
        self.assertEqual(os.path.basename(res), 'archive.tar.bz2')
        self.assertEqual(self._tarinfo(res), self._created_files)

    @unittest.skipUnless(lzma, 'Need xz support to run')
    def test_make_archive_xztar(self):
        base_dir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name, 'xztar', base_dir, 'dist')
        self.assertTrue(os.path.exists(res))
        self.assertEqual(os.path.basename(res), 'archive.tar.xz')
        self.assertEqual(self._tarinfo(res), self._created_files)

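    # owner and group are accepted for both zip and tar archives; the last
    # call checks that unknown names do not prevent archive creation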
    def test_make_archive_owner_group(self):
        # testing make_archive with owner and group, with various combinations
        # this works even if there's no gid/uid support
        if UID_GID_SUPPORT:
            group = grp.getgrgid(0)[0]
            owner = pwd.getpwuid(0)[0]
        else:
            group = owner = 'root'

        base_dir = self._create_files()
        root_dir = self.mkdtemp()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner,
                           group=group)
        self.assertTrue(os.path.exists(res))

        res = make_archive(base_name, 'zip', root_dir, base_dir)
        self.assertTrue(os.path.exists(res))

        res = make_archive(base_name, 'tar', root_dir, base_dir,
                           owner=owner, group=group)
        self.assertTrue(os.path.exists(res))

        res = make_archive(base_name, 'tar', root_dir, base_dir,
                           owner='kjhkjhkjg', group='oihohoh')
        self.assertTrue(os.path.exists(res))

    @unittest.skipUnless(ZLIB_SUPPORT, "Requires zlib")
    @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
    def test_tarfile_root_owner(self):
        tmpdir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        group = grp.getgrgid(0)[0]
        owner = pwd.getpwuid(0)[0]
        try:
            archive_name = make_tarball(base_name, 'dist', compress=None,
                                        owner=owner, group=group)
        finally:
            os.chdir(old_dir)

        # check if the uncompressed tarball was created
        self.assertTrue(os.path.exists(archive_name))

        # now check the member ownership
        archive = tarfile.open(archive_name)
        try:
            for member in archive.getmembers():
                self.assertEqual(member.uid, 0)
                self.assertEqual(member.gid, 0)
        finally:
            archive.close()


def test_suite():
    return unittest.makeSuite(ArchiveUtilTestCase)

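# Allow running this test module standalone, e.g.
# ./python Lib/distutils/tests/test_archive_util.py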
if __name__ == "__main__":
    run_unittest(test_suite())