"""Utility functions for copying and archiving files and directory trees.

XXX The functions here don't copy the resource fork or other metadata on Mac.

"""

import os
import sys
import stat
from os.path import abspath
import fnmatch
import collections
import errno

try:
    import zlib
    del zlib
    _ZLIB_SUPPORTED = True
except ImportError:
    _ZLIB_SUPPORTED = False

try:
    import bz2
    del bz2
    _BZ2_SUPPORTED = True
except ImportError:
    _BZ2_SUPPORTED = False

try:
    from pwd import getpwnam
except ImportError:
    getpwnam = None

try:
    from grp import getgrnam
except ImportError:
    getgrnam = None

__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
           "copytree", "move", "rmtree", "Error", "SpecialFileError",
           "ExecError", "make_archive", "get_archive_formats",
           "register_archive_format", "unregister_archive_format",
           "ignore_patterns"]

class Error(EnvironmentError):
    pass

class SpecialFileError(EnvironmentError):
    """Raised when trying to do a kind of operation (e.g. copying) which is
    not supported on a special file (e.g. a named pipe)"""

class ExecError(EnvironmentError):
    """Raised when a command could not be executed"""

try:
    WindowsError
except NameError:
    WindowsError = None

def copyfileobj(fsrc, fdst, length=16*1024):
    """copy data from file-like object fsrc to file-like object fdst"""
    while 1:
        buf = fsrc.read(length)
        if not buf:
            break
        fdst.write(buf)

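# Illustrative usage (the file names are hypothetical): copyfileobj() works
# on any pair of open file-like objects and copies in 'length'-sized chunks,
# so even very large files are never read into memory at once.
def _example_copyfileobj():
    with open('source.bin', 'rb') as fsrc:
        with open('backup.bin', 'wb') as fdst:
            copyfileobj(fsrc, fdst, length=64*1024)
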
def _samefile(src, dst):
    # Macintosh, Unix.
    if hasattr(os.path, 'samefile'):
        try:
            return os.path.samefile(src, dst)
        except OSError:
            return False

    # All other platforms: check for same pathname.
    return (os.path.normcase(os.path.abspath(src)) ==
            os.path.normcase(os.path.abspath(dst)))

def copyfile(src, dst):
    """Copy data from src to dst"""
    if _samefile(src, dst):
        raise Error("`%s` and `%s` are the same file" % (src, dst))

    for fn in [src, dst]:
        try:
            st = os.stat(fn)
        except OSError:
            # File most likely does not exist
            pass
        else:
            # XXX What about other special files? (sockets, devices...)
            if stat.S_ISFIFO(st.st_mode):
                raise SpecialFileError("`%s` is a named pipe" % fn)

    with open(src, 'rb') as fsrc:
        with open(dst, 'wb') as fdst:
            copyfileobj(fsrc, fdst)

def copymode(src, dst):
    """Copy mode bits from src to dst"""
    if hasattr(os, 'chmod'):
        st = os.stat(src)
        mode = stat.S_IMODE(st.st_mode)
        os.chmod(dst, mode)

def copystat(src, dst):
    """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
    st = os.stat(src)
    mode = stat.S_IMODE(st.st_mode)
    if hasattr(os, 'utime'):
        os.utime(dst, (st.st_atime, st.st_mtime))
    if hasattr(os, 'chmod'):
        os.chmod(dst, mode)
    if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
        try:
            os.chflags(dst, st.st_flags)
        except OSError, why:
            for err in 'EOPNOTSUPP', 'ENOTSUP':
                if hasattr(errno, err) and why.errno == getattr(errno, err):
                    break
            else:
                raise

def copy(src, dst):
    """Copy data and mode bits ("cp src dst").

    The destination may be a directory.

    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst)
    copymode(src, dst)

def copy2(src, dst):
    """Copy data and all stat info ("cp -p src dst").

    The destination may be a directory.

    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst)
    copystat(src, dst)

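# Illustrative usage (hypothetical paths): copy() preserves only the mode
# bits, while copy2() also carries over timestamps and flags via copystat().
# Both accept an existing directory as the destination.
def _example_copy_vs_copy2():
    copy('settings.conf', 'work')      # data + mode bits
    copy2('settings.conf', 'backups')  # data + mode bits + timestamps/flags
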
def ignore_patterns(*patterns):
    """Function that can be used as copytree() ignore parameter.

    Patterns is a sequence of glob-style patterns
    that are used to exclude files"""
    def _ignore_patterns(path, names):
        ignored_names = []
        for pattern in patterns:
            ignored_names.extend(fnmatch.filter(names, pattern))
        return set(ignored_names)
    return _ignore_patterns

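# Illustrative usage (hypothetical directory names): ignore_patterns() builds
# the callable expected by copytree()'s 'ignore' argument from one or more
# glob-style patterns.
def _example_ignore_patterns():
    copytree('project', 'project_backup',
             ignore=ignore_patterns('*.pyc', 'tmp*', '.svn'))
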
def copytree(src, dst, symlinks=False, ignore=None):
    """Recursively copy a directory tree using copy2().

    The destination directory must not already exist.
    If exception(s) occur, an Error is raised with a list of reasons.

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied.

    The optional ignore argument is a callable. If given, it
    is called with the `src` parameter, which is the directory
    being visited by copytree(), and `names` which is the list of
    `src` contents, as returned by os.listdir():

        callable(src, names) -> ignored_names

    Since copytree() is called recursively, the callable will be
    called once for each directory that is copied. It returns a
    list of names relative to the `src` directory that should
    not be copied.

    XXX Consider this example code rather than the ultimate tool.

    """
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()

    os.makedirs(dst)
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if symlinks and os.path.islink(srcname):
                linkto = os.readlink(srcname)
                os.symlink(linkto, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks, ignore)
            else:
                # Will raise a SpecialFileError for unsupported file types
                copy2(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error, err:
            errors.extend(err.args[0])
        except EnvironmentError, why:
            errors.append((srcname, dstname, str(why)))
    try:
        copystat(src, dst)
    except OSError, why:
        if WindowsError is not None and isinstance(why, WindowsError):
            # Copying file access times may fail on Windows
            pass
        else:
            errors.append((src, dst, str(why)))
    if errors:
        raise Error, errors

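# Illustrative usage (hypothetical paths): copytree() keeps going after
# individual failures and raises a single Error at the end, so callers that
# want per-file diagnostics can unpack err.args[0], a list of
# (srcname, dstname, reason) tuples.
def _example_copytree_errors():
    try:
        copytree('data', 'data_copy', symlinks=True)
    except Error, err:
        for srcname, dstname, reason in err.args[0]:
            print "could not copy %s -> %s: %s" % (srcname, dstname, reason)
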
def rmtree(path, ignore_errors=False, onerror=None):
    """Recursively delete a directory tree.

    If ignore_errors is set, errors are ignored; otherwise, if onerror
    is set, it is called to handle the error with arguments (func,
    path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
    path is the argument to that function that caused it to fail; and
    exc_info is a tuple returned by sys.exc_info(). If ignore_errors
    is false and onerror is None, an exception is raised.

    """
    if ignore_errors:
        def onerror(*args):
            pass
    elif onerror is None:
        def onerror(*args):
            raise
    try:
        if os.path.islink(path):
            # symlinks to directories are forbidden, see bug #1669
            raise OSError("Cannot call rmtree on a symbolic link")
    except OSError:
        onerror(os.path.islink, path, sys.exc_info())
        # can't continue even if onerror hook returns
        return
    names = []
    try:
        names = os.listdir(path)
    except os.error, err:
        onerror(os.listdir, path, sys.exc_info())
    for name in names:
        fullname = os.path.join(path, name)
        try:
            mode = os.lstat(fullname).st_mode
        except os.error:
            mode = 0
        if stat.S_ISDIR(mode):
            rmtree(fullname, ignore_errors, onerror)
        else:
            try:
                os.remove(fullname)
            except os.error, err:
                onerror(os.remove, fullname, sys.exc_info())
    try:
        os.rmdir(path)
    except os.error:
        onerror(os.rmdir, path, sys.exc_info())

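# Illustrative usage (hypothetical path): an 'onerror' hook that makes the
# offending entry writable and retries, a common workaround for read-only
# files on Windows.  Anything it cannot handle is re-raised, matching the
# default behaviour.
def _example_rmtree_onerror():
    def _retry_writable(func, path, exc_info):
        if func in (os.remove, os.rmdir) and exc_info[1].errno == errno.EACCES:
            os.chmod(path, stat.S_IWRITE)
            func(path)
        else:
            raise
    rmtree('build', onerror=_retry_writable)
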
def _basename(path):
    # A basename() variant which first strips the trailing slash, if present.
    # Thus we always get the last component of the path, even for directories.
    sep = os.path.sep + (os.path.altsep or '')
    return os.path.basename(path.rstrip(sep))

def move(src, dst):
    """Recursively move a file or directory to another location. This is
    similar to the Unix "mv" command.

    If the destination is a directory or a symlink to a directory, the source
    is moved inside the directory. The destination path must not already
    exist.

    If the destination already exists but is not a directory, it may be
    overwritten depending on os.rename() semantics.

    If the destination is on our current filesystem, then rename() is used.
    Otherwise, src is copied to the destination and then removed.
    A lot more could be done here...  A look at mv.c shows a lot of
    the issues this implementation glosses over.

    """
    real_dst = dst
    if os.path.isdir(dst):
        if _samefile(src, dst):
            # We might be on a case insensitive filesystem,
            # perform the rename anyway.
            os.rename(src, dst)
            return

        real_dst = os.path.join(dst, _basename(src))
        if os.path.exists(real_dst):
            raise Error, "Destination path '%s' already exists" % real_dst
    try:
        os.rename(src, real_dst)
    except OSError:
        if os.path.isdir(src):
            if _destinsrc(src, dst):
                raise Error, "Cannot move a directory '%s' into itself '%s'." % (src, dst)
            copytree(src, real_dst, symlinks=True)
            rmtree(src)
        else:
            copy2(src, real_dst)
            os.unlink(src)

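# Illustrative usage (hypothetical paths): when source and destination live
# on the same filesystem this is a single os.rename(); across filesystems it
# falls back to copy2()/copytree() followed by removal of the source.
def _example_move():
    move('report.txt', 'archive')             # file into a directory
    move('old_logs', '/mnt/backup/old_logs')  # whole tree across devices
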
def _destinsrc(src, dst):
    src = abspath(src)
    dst = abspath(dst)
    if not src.endswith(os.path.sep):
        src += os.path.sep
    if not dst.endswith(os.path.sep):
        dst += os.path.sep
    return dst.startswith(src)

def _get_gid(name):
    """Returns a gid, given a group name."""
    if getgrnam is None or name is None:
        return None
    try:
        result = getgrnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None

def _get_uid(name):
    """Returns a uid, given a user name."""
    if getpwnam is None or name is None:
        return None
    try:
        result = getpwnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None

def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                  owner=None, group=None, logger=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "bzip2", or None.

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_name' + ".tar", possibly plus
    the appropriate compression extension (".gz", or ".bz2").

    Returns the output filename.
    """
    if compress is None:
        tar_compression = ''
    elif _ZLIB_SUPPORTED and compress == 'gzip':
        tar_compression = 'gz'
    elif _BZ2_SUPPORTED and compress == 'bzip2':
        tar_compression = 'bz2'
    else:
        raise ValueError("bad value for 'compress', or compression format not "
                         "supported: {0}".format(compress))

    compress_ext = '.' + tar_compression if compress else ''
    archive_name = base_name + '.tar' + compress_ext
    archive_dir = os.path.dirname(archive_name)

    if archive_dir and not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # creating the tarball
    import tarfile  # late import so Python build itself doesn't break

    if logger is not None:
        logger.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression)
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    return archive_name

def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
    # XXX see if we want to keep an external call here
    if verbose:
        zipoptions = "-r"
    else:
        zipoptions = "-rq"
    from distutils.errors import DistutilsExecError
    from distutils.spawn import spawn
    try:
        spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
    except DistutilsExecError:
        # XXX really should distinguish between "couldn't find
        # external 'zip' command" and "zip failed".
        raise ExecError, \
            ("unable to create zip file '%s': "
             "could neither import the 'zipfile' module nor "
             "find a standalone zip utility") % zip_filename

def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip".  Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path).  If neither tool is
    available, raises ExecError.  Returns the name of the output zip
    file.
    """
    zip_filename = base_name + ".zip"
    archive_dir = os.path.dirname(base_name)

    if archive_dir and not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # If zipfile module is not available, try spawning an external 'zip'
    # command.
    try:
        import zlib
        import zipfile
    except ImportError:
        zipfile = None

    if zipfile is None:
        _call_external_zip(base_dir, zip_filename, verbose, dry_run)
    else:
        if logger is not None:
            logger.info("creating '%s' and adding '%s' to it",
                        zip_filename, base_dir)

        if not dry_run:
            with zipfile.ZipFile(zip_filename, "w",
                                 compression=zipfile.ZIP_DEFLATED) as zf:
                path = os.path.normpath(base_dir)
                if path != os.curdir:
                    zf.write(path, path)
                    if logger is not None:
                        logger.info("adding '%s'", path)
                for dirpath, dirnames, filenames in os.walk(base_dir):
                    for name in sorted(dirnames):
                        path = os.path.normpath(os.path.join(dirpath, name))
                        zf.write(path, path)
                        if logger is not None:
                            logger.info("adding '%s'", path)
                    for name in filenames:
                        path = os.path.normpath(os.path.join(dirpath, name))
                        if os.path.isfile(path):
                            zf.write(path, path)
                            if logger is not None:
                                logger.info("adding '%s'", path)

    return zip_filename

_ARCHIVE_FORMATS = {
    'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip': (_make_zipfile, [], "ZIP file")
    }

if _ZLIB_SUPPORTED:
    _ARCHIVE_FORMATS['gztar'] = (_make_tarball, [('compress', 'gzip')],
                                 "gzip'ed tar-file")

if _BZ2_SUPPORTED:
    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
                                 "bzip2'ed tar-file")

def get_archive_formats():
    """Returns a list of supported formats for archiving and unarchiving.

    Each element of the returned sequence is a tuple (name, description).
    """
    formats = [(name, registry[2]) for name, registry in
               _ARCHIVE_FORMATS.items()]
    formats.sort()
    return formats

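# Illustrative usage: "tar" and "zip" are always registered; "gztar" and
# "bztar" appear only when the zlib and bz2 modules are importable.
def _example_list_formats():
    for name, description in get_archive_formats():
        print "%-8s %s" % (name, description)
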
def register_archive_format(name, function, extra_args=None, description=''):
    """Registers an archive format.

    name is the name of the format. function is the callable that will be
    used to create archives. If provided, extra_args is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_archive_formats() function.
    """
    if extra_args is None:
        extra_args = []
    if not isinstance(function, collections.Callable):
        raise TypeError('The %s object is not callable' % function)
    if not isinstance(extra_args, (tuple, list)):
        raise TypeError('extra_args needs to be a sequence')
    for element in extra_args:
        if not isinstance(element, (tuple, list)) or len(element) != 2:
            raise TypeError('extra_args elements are: (arg_name, value)')

    _ARCHIVE_FORMATS[name] = (function, extra_args, description)

def unregister_archive_format(name):
    del _ARCHIVE_FORMATS[name]

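# Illustrative sketch (the 'listing' format and the paths are hypothetical):
# any callable accepting (base_name, base_dir, **kwargs) can be registered
# and then used through make_archive() like the built-in formats.
def _example_register_format():
    def _make_listing(base_name, base_dir, dry_run=0, logger=None, **kwargs):
        # Write a plain-text manifest of base_dir instead of a real archive.
        archive_name = base_name + '.txt'
        if not dry_run:
            with open(archive_name, 'w') as f:
                for dirpath, dirnames, filenames in os.walk(base_dir):
                    for name in filenames:
                        f.write(os.path.join(dirpath, name) + '\n')
        return archive_name

    register_archive_format('listing', _make_listing,
                            description='plain-text file listing')
    try:
        make_archive('manifest', 'listing', root_dir='project')
    finally:
        unregister_archive_format('listing')
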
def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
                 dry_run=0, owner=None, group=None, logger=None):
    """Create an archive file (e.g. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "gztar",
    or "bztar", or any other registered format.

    'root_dir' is a directory that will be the root directory of the
    archive; i.e. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    i.e. 'base_dir' will be the common prefix of all files and
    directories in the archive.  'root_dir' and 'base_dir' both default
    to the current directory.  Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.
    """
    save_cwd = os.getcwd()
    if root_dir is not None:
        if logger is not None:
            logger.debug("changing into '%s'", root_dir)
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    kwargs = {'dry_run': dry_run, 'logger': logger}

    try:
        format_info = _ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError, "unknown archive format '%s'" % format

    func = format_info[0]
    for arg, val in format_info[1]:
        kwargs[arg] = val

    if format != 'zip':
        kwargs['owner'] = owner
        kwargs['group'] = group

    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        if root_dir is not None:
            if logger is not None:
                logger.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename

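# Illustrative usage (hypothetical paths): archive project/src as src.tar.gz.
# make_archive() chdirs into 'root_dir', archives 'base_dir', and restores
# the caller's working directory afterwards; 'gztar' assumes zlib is
# available.
def _example_make_archive():
    return make_archive('src', 'gztar', root_dir='project', base_dir='src',
                        owner='root', group='root')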