"""distutils.command.sdist

Implements the Distutils 'sdist' command (create a source distribution)."""

# created 1999/09/22, Greg Ward

__revision__ = "$Id$"

import sys, os, string, re
import fnmatch
from types import *
from glob import glob
from distutils.core import Command
from distutils.util import \
     convert_path, create_tree, remove_tree, newer, write_file, \
     check_archive_formats, ARCHIVE_FORMATS
from distutils.text_file import TextFile
from distutils.errors import \
     DistutilsExecError, DistutilsOptionError, DistutilsPlatformError

class sdist (Command):

    description = "create a source distribution (tarball, zip file, etc.)"

    user_options = [
        ('template=', 't',
         "name of manifest template file [default: MANIFEST.in]"),
        ('manifest=', 'm',
         "name of manifest file [default: MANIFEST]"),
        ('use-defaults', None,
         "include the default file set in the manifest "
         "[default; disable with --no-defaults]"),
        ('manifest-only', 'o',
         "just regenerate the manifest and then stop "
         "(implies --force-manifest)"),
        ('force-manifest', 'f',
         "forcibly regenerate the manifest and carry on as usual"),
        ('formats=', None,
         "formats for source distribution (comma-separated list)"),
        ('keep-tree', 'k',
         "keep the distribution tree around after creating " +
         "archive file(s)"),
        ]

    # XXX ugh: this has to precede the 'help_options' list, because
    # it is mentioned there -- also, this is not a method, even though
    # it's defined in a class: double-ugh!
    def show_formats ():
        """Print all possible values for the 'formats' option -- used by
        the "--help-formats" command-line option.
        """
        from distutils.fancy_getopt import FancyGetopt
        formats = []
        for format in ARCHIVE_FORMATS.keys():
            formats.append(("formats=" + format, None,
                            ARCHIVE_FORMATS[format][2]))
        formats.sort()
        pretty_printer = FancyGetopt(formats)
        pretty_printer.print_help(
            "List of available source distribution formats:")

    help_options = [
        ('help-formats', None,
         "lists available distribution formats", show_formats),
        ]

    negative_opts = {'use-defaults': 'no-defaults'}

    default_format = { 'posix': 'gztar',
                       'nt': 'zip' }
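
    # Typical invocations, for illustration (assuming a standard setup.py
    # in the current directory):
    #
    #   python setup.py sdist
    #   python setup.py sdist --manifest-only
    #   python setup.py sdist --formats=gztar,zip --keep-tree
    #
    # The option names correspond to 'user_options' above; the available
    # archive formats can be listed with --help-formats.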

    def initialize_options (self):
        # 'template' and 'manifest' are, respectively, the names of
        # the manifest template and manifest file.
        self.template = None
        self.manifest = None

        # 'use_defaults': if true, we will include the default file set
        # in the manifest
        self.use_defaults = 1

        self.manifest_only = 0
        self.force_manifest = 0

        self.formats = None
        self.keep_tree = 0

        self.archive_files = None

    def finalize_options (self):
        if self.manifest is None:
            self.manifest = "MANIFEST"
        if self.template is None:
            self.template = "MANIFEST.in"
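
        # 'formats' may arrive as a single comma-separated string from the
        # command line; ensure_string_list() below normalizes it, so (for
        # illustration) "gztar,zip" becomes ['gztar', 'zip'].  The exact
        # splitting is done by the base Command class.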
        self.ensure_string_list('formats')
        if self.formats is None:
            try:
                self.formats = [self.default_format[os.name]]
            except KeyError:
                raise DistutilsPlatformError, \
                      "don't know how to create source distributions " + \
                      "on platform %s" % os.name

        bad_format = check_archive_formats (self.formats)
        if bad_format:
            raise DistutilsOptionError, \
                  "unknown archive format '%s'" % bad_format

    def run (self):

        # 'files' is the list of files that will make up the manifest
        self.files = []

        # Ensure that all required meta-data is given; warn if not (but
        # don't die, it's not *that* serious!)
        self.check_metadata ()

        # Do whatever it takes to get the list of files to process
        # (process the manifest template, read an existing manifest,
        # whatever).  File list is put into 'self.files'.
        self.get_file_list ()

        # If user just wanted us to regenerate the manifest, stop now.
        if self.manifest_only:
            return

        # Otherwise, go ahead and create the source distribution tarball,
        # or zipfile, or whatever.
        self.make_distribution ()

    def check_metadata (self):
        """Ensure that all required elements of meta-data (name, version,
        URL, (author and author_email) or (maintainer and
        maintainer_email)) are supplied by the Distribution object; warn if
        any are missing.
        """
        metadata = self.distribution.metadata

        missing = []
        for attr in ('name', 'version', 'url'):
            if not (hasattr (metadata, attr) and getattr (metadata, attr)):
                missing.append (attr)

        if missing:
            self.warn ("missing required meta-data: " +
                       string.join (missing, ", "))

        if metadata.author:
            if not metadata.author_email:
                self.warn ("missing meta-data: if 'author' supplied, " +
                           "'author_email' must be supplied too")
        elif metadata.maintainer:
            if not metadata.maintainer_email:
                self.warn ("missing meta-data: if 'maintainer' supplied, " +
                           "'maintainer_email' must be supplied too")
        else:
            self.warn ("missing meta-data: either (author and author_email) " +
                       "or (maintainer and maintainer_email) " +
                       "must be supplied")

    # check_metadata ()

    def get_file_list (self):
        """Figure out the list of files to include in the source
        distribution, and put it in 'self.files'.  This might involve
        reading the manifest template (and writing the manifest), or just
        reading the manifest, or just using the default file set -- it all
        depends on the user's options and the state of the filesystem.
        """
        template_exists = os.path.isfile (self.template)
        if template_exists:
            template_newer = newer (self.template, self.manifest)

        # Regenerate the manifest if necessary (or if explicitly told to)
        if ((template_exists and template_newer) or
            self.force_manifest or
            self.manifest_only):

            if not template_exists:
                self.warn (("manifest template '%s' does not exist " +
                            "(using default file list)") %
                           self.template)

            # Add default file set to 'files'
            if self.use_defaults:
                self.add_defaults ()

            # Read manifest template if it exists
            if template_exists:
                self.read_template ()

            # File list now complete -- sort it so that higher-level files
            # come first
            sortable_files = map (os.path.split, self.files)
            sortable_files.sort ()
            self.files = []
            for sort_tuple in sortable_files:
                self.files.append (apply (os.path.join, sort_tuple))

            # Remove duplicates from the file list
            for i in range (len(self.files)-1, 0, -1):
                if self.files[i] == self.files[i-1]:
                    del self.files[i]

            # And write complete file list (including default file set) to
            # the manifest.
            self.write_manifest ()

        # Don't regenerate the manifest, just read it in.
        else:
            self.read_manifest ()

    # get_file_list ()

    def add_defaults (self):
        """Add all the default files to self.files:
          - README or README.txt
          - setup.py
          - test/test*.py
          - all pure Python modules mentioned in setup script
          - all C sources listed as part of extensions or C libraries
            in the setup script (doesn't catch C headers!)
        Warns if (README or README.txt) or setup.py are missing; everything
        else is optional.
        """
        standards = [('README', 'README.txt'), 'setup.py']
        for fn in standards:
            if type (fn) is TupleType:
                alts = fn
                got_it = 0
                for fn in alts:
                    if os.path.exists (fn):
                        got_it = 1
                        self.files.append (fn)
                        break

                if not got_it:
                    self.warn ("standard file not found: should have one of " +
                               string.join (alts, ', '))
            else:
                if os.path.exists (fn):
                    self.files.append (fn)
                else:
                    self.warn ("standard file '%s' not found" % fn)

        optional = ['test/test*.py']
        for pattern in optional:
            files = filter (os.path.isfile, glob (pattern))
            if files:
                self.files.extend (files)

        if self.distribution.has_pure_modules():
            build_py = self.get_finalized_command ('build_py')
            self.files.extend (build_py.get_source_files ())

        if self.distribution.has_ext_modules():
            build_ext = self.get_finalized_command ('build_ext')
            self.files.extend (build_ext.get_source_files ())

        if self.distribution.has_c_libraries():
            build_clib = self.get_finalized_command ('build_clib')
            self.files.extend (build_clib.get_source_files ())

    # add_defaults ()

    def search_dir (self, dir, pattern=None):
        """Recursively find files under 'dir' matching 'pattern' (a string
        containing a Unix-style glob pattern).  If 'pattern' is None, find
        all files under 'dir'.  Return the list of found filenames.
        """
        allfiles = findall (dir)
        if pattern is None:
            return allfiles

        pattern_re = translate_pattern (pattern)
        files = []
        for file in allfiles:
            if pattern_re.match (os.path.basename (file)):
                files.append (file)

        return files

    # search_dir ()

    def recursive_exclude_pattern (self, dir, pattern=None):
        """Remove filenames from 'self.files' that are under 'dir' and
        whose basenames match 'pattern'.
        """
        self.debug_print("recursive_exclude_pattern: dir=%s, pattern=%s" %
                         (dir, pattern))
        if pattern is None:
            pattern_re = None
        else:
            pattern_re = translate_pattern (pattern)

        for i in range (len (self.files)-1, -1, -1):
            (cur_dir, cur_base) = os.path.split (self.files[i])
            if (cur_dir == dir and
                (pattern_re is None or pattern_re.match (cur_base))):
                self.debug_print("removing %s" % self.files[i])
                del self.files[i]

    def read_template (self):
        """Read and parse the manifest template file named by
        'self.template' (usually "MANIFEST.in").  Process all file
        specifications (include and exclude) in the manifest template and
        update 'self.files' accordingly (filenames may be added to
        or removed from 'self.files' based on the manifest template).
        """
        assert self.files is not None and type (self.files) is ListType
        self.announce("reading manifest template '%s'" % self.template)

        template = TextFile (self.template,
                             strip_comments=1,
                             skip_blanks=1,
                             join_lines=1,
                             lstrip_ws=1,
                             rstrip_ws=1,
                             collapse_ws=1)

        all_files = findall ()

        while 1:
            line = template.readline()
            if line is None:            # end of file
                break

            words = string.split (line)
            action = words[0]

            # First, check that the right number of words are present
            # for the given action (which is the first word)
            if action in ('include', 'exclude',
                          'global-include', 'global-exclude'):
                if len (words) < 2:
                    template.warn \
                        ("invalid manifest template line: " +
                         "'%s' expects <pattern1> <pattern2> ..." %
                         action)
                    continue

                pattern_list = map(convert_path, words[1:])

            elif action in ('recursive-include', 'recursive-exclude'):
                if len (words) < 3:
                    template.warn \
                        ("invalid manifest template line: " +
                         "'%s' expects <dir> <pattern1> <pattern2> ..." %
                         action)
                    continue

                dir = convert_path(words[1])
                pattern_list = map (convert_path, words[2:])

            elif action in ('graft', 'prune'):
                if len (words) != 2:
                    template.warn \
                        ("invalid manifest template line: " +
                         "'%s' expects a single <dir_pattern>" %
                         action)
                    continue

                dir_pattern = convert_path (words[1])

            else:
                template.warn ("invalid manifest template line: " +
                               "unknown action '%s'" % action)
                continue

            # OK, now we know that the action is valid and we have the
            # right number of words on the line for that action -- so we
            # can proceed with minimal error-checking.  Also, we have
            # defined either (pattern), (dir and pattern), or
            # (dir_pattern) -- so we don't have to spend any time
            # digging stuff up out of 'words'.

            if action == 'include':
                self.debug_print("include " + string.join(pattern_list))
                for pattern in pattern_list:
                    files = self.select_pattern (all_files, pattern, anchor=1)
                    if not files:
                        template.warn ("no files found matching '%s'" %
                                       pattern)
                    else:
                        self.files.extend (files)

            elif action == 'exclude':
                self.debug_print("exclude " + string.join(pattern_list))
                for pattern in pattern_list:
                    num = self.exclude_pattern (self.files, pattern, anchor=1)
                    if num == 0:
                        template.warn (
                            "no previously-included files found matching '%s'" %
                            pattern)

            elif action == 'global-include':
                self.debug_print("global-include " + string.join(pattern_list))
                for pattern in pattern_list:
                    files = self.select_pattern (all_files, pattern, anchor=0)
                    if not files:
                        template.warn (("no files found matching '%s' " +
                                        "anywhere in distribution") %
                                       pattern)
                    else:
                        self.files.extend (files)

            elif action == 'global-exclude':
                self.debug_print("global-exclude " + string.join(pattern_list))
                for pattern in pattern_list:
                    num = self.exclude_pattern (self.files, pattern, anchor=0)
                    if num == 0:
                        template.warn \
                            (("no previously-included files matching '%s' " +
                              "found anywhere in distribution") %
                             pattern)

            elif action == 'recursive-include':
                self.debug_print("recursive-include %s %s" %
                                 (dir, string.join(pattern_list)))
                for pattern in pattern_list:
                    files = self.select_pattern (
                        all_files, pattern, prefix=dir)
                    if not files:
                        template.warn (("no files found matching '%s' " +
                                        "under directory '%s'") %
                                       (pattern, dir))
                    else:
                        self.files.extend (files)

            elif action == 'recursive-exclude':
                self.debug_print("recursive-exclude %s %s" %
                                 (dir, string.join(pattern_list)))
                for pattern in pattern_list:
                    num = self.exclude_pattern(
                        self.files, pattern, prefix=dir)
                    if num == 0:
                        template.warn \
                            (("no previously-included files matching '%s' " +
                              "found under directory '%s'") %
                             (pattern, dir))

            elif action == 'graft':
                self.debug_print("graft " + dir_pattern)
                files = self.select_pattern(
                    all_files, None, prefix=dir_pattern)
                if not files:
                    template.warn ("no directories found matching '%s'" %
                                   dir_pattern)
                else:
                    self.files.extend (files)

            elif action == 'prune':
                self.debug_print("prune " + dir_pattern)
                num = self.exclude_pattern(
                    self.files, None, prefix=dir_pattern)
                if num == 0:
                    template.warn \
                        (("no previously-included directories found " +
                          "matching '%s'") %
                         dir_pattern)

            else:
                raise RuntimeError, \
                      "this cannot happen: invalid action '%s'" % action

        # while loop over lines of template file

        # Prune away the build and source distribution directories
        build = self.get_finalized_command ('build')
        self.exclude_pattern (self.files, None, prefix=build.build_base)

        base_dir = self.distribution.get_fullname()
        self.exclude_pattern (self.files, None, prefix=base_dir)

    # read_template ()

    def select_pattern (self, files, pattern, anchor=1, prefix=None):
        """Select strings (presumably filenames) from 'files' that match
        'pattern', a Unix-style wildcard (glob) pattern.  Patterns are not
        quite the same as implemented by the 'fnmatch' module: '*' and '?'
        match non-special characters, where "special" is platform-dependent:
        slash on Unix; colon, slash, and backslash on DOS/Windows; and colon
        on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match.  'anchor' is ignored in this case.

        Return the list of matching strings, possibly empty.
        """
        matches = []
        pattern_re = translate_pattern (pattern, anchor, prefix)
        self.debug_print("select_pattern: applying regex r'%s'" %
                         pattern_re.pattern)
        for name in files:
            if pattern_re.search (name):
                matches.append (name)
                self.debug_print(" adding " + name)

        return matches

    # select_pattern ()

    def exclude_pattern (self, files, pattern, anchor=1, prefix=None):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'.  'pattern', 'anchor', and 'prefix' are the same
        as for 'select_pattern()', above.  The list 'files' is modified
        in place; return the number of entries removed.
        """
        pattern_re = translate_pattern (pattern, anchor, prefix)
        self.debug_print("exclude_pattern: applying regex r'%s'" %
                         pattern_re.pattern)
        num_removed = 0                 # callers check this count
        for i in range (len(files)-1, -1, -1):
            if pattern_re.search (files[i]):
                self.debug_print(" removing " + files[i])
                del files[i]
                num_removed = num_removed + 1

        return num_removed

    # exclude_pattern ()

    def write_manifest (self):
        """Write the file list in 'self.files' (presumably as filled in by
        'add_defaults()' and 'read_template()') to the manifest file named
        by 'self.manifest'.
        """
        self.execute(write_file,
                     (self.manifest, self.files),
                     "writing manifest file '%s'" % self.manifest)

    # write_manifest ()

    def read_manifest (self):
        """Read the manifest file (named by 'self.manifest') and use it to
        fill in 'self.files', the list of files to include in the source
        distribution.
        """
        self.announce("reading manifest file '%s'" % self.manifest)
        manifest = open (self.manifest)
        while 1:
            line = manifest.readline ()
            if line == '':              # end of file
                break
            if line[-1] == '\n':
                line = line[0:-1]
            self.files.append (line)

    # read_manifest ()

    def make_release_tree (self, base_dir, files):
        """Create the directory tree that will become the source
        distribution archive.  All directories implied by the filenames in
        'files' are created under 'base_dir', and then we hard link or copy
        (if hard linking is unavailable) those files into place.
        Essentially, this duplicates the developer's source tree, but in a
        directory named after the distribution, containing only the files
        to be distributed.
        """

        # Create all the directories under 'base_dir' necessary to
        # put 'files' there.
        create_tree (base_dir, files,
                     verbose=self.verbose, dry_run=self.dry_run)

        # And walk over the list of files, either making a hard link (if
        # os.link exists) to each one that doesn't already exist in its
        # corresponding location under 'base_dir', or copying each file
        # that's out-of-date in 'base_dir'.  (Usually, all files will be
        # out-of-date, because by default we blow away 'base_dir' when
        # we're done making the distribution archives.)

        if hasattr (os, 'link'):        # can make hard links on this system
            link = 'hard'
            msg = "making hard links in %s..." % base_dir
        else:                           # nope, have to copy
            link = None
            msg = "copying files to %s..." % base_dir

        self.announce (msg)
        for file in files:
            dest = os.path.join (base_dir, file)
            self.copy_file (file, dest, link=link)

    # make_release_tree ()

    def make_distribution (self):
        """Create the source distribution(s).  First, we create the release
        tree with 'make_release_tree()'; then, we create all required
        archive files (according to 'self.formats') from the release tree.
        Finally, we clean up by blowing away the release tree (unless
        'self.keep_tree' is true).  The list of archive files created is
        stored so it can be retrieved later by 'get_archive_files()'.
        """
        # Don't warn about missing meta-data here -- should be (and is!)
        # done elsewhere.
        base_dir = self.distribution.get_fullname()

        # Remove any files that match "base_dir" from the fileset -- we
        # don't want to go distributing the distribution inside itself!
        self.exclude_pattern (self.files, base_dir + "*")

        self.make_release_tree (base_dir, self.files)
        archive_files = []              # remember names of files we create
        for fmt in self.formats:
            file = self.make_archive (base_dir, fmt, base_dir=base_dir)
            archive_files.append(file)

        self.archive_files = archive_files

        if not self.keep_tree:
            remove_tree (base_dir, self.verbose, self.dry_run)

    def get_archive_files (self):
        """Return the list of archive files created when the command
        was run, or None if the command hasn't run yet.
        """
        return self.archive_files

# class sdist

# ----------------------------------------------------------------------
# Utility functions

def findall (dir = os.curdir):
    """Find all files under 'dir' and return the list of full filenames
    (relative to 'dir').
    """
    list = []
    stack = [dir]
    pop = stack.pop
    push = stack.append

    while stack:
        dir = pop()
        names = os.listdir (dir)

        for name in names:
            if dir != os.curdir:        # avoid the dreaded "./" syndrome
                fullname = os.path.join (dir, name)
            else:
                fullname = name
            list.append (fullname)
            if os.path.isdir (fullname) and not os.path.islink(fullname):
                push (fullname)

    return list

def glob_to_re (pattern):
    """Translate a shell-like glob pattern to a regular expression; return
    a string containing the regex.  Differs from 'fnmatch.translate()' in
    that '*' does not match "special characters" (which are
    platform-specific).
    """
    pattern_re = fnmatch.translate (pattern)

    # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
    # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
    # and by extension they shouldn't match such "special characters" under
    # any OS.  So change all non-escaped dots in the RE to match any
    # character except the special characters.
    # XXX currently the "special characters" are just slash -- i.e. this is
    # Unix-only.
    pattern_re = re.sub (r'(^|[^\\])\.', r'\1[^/]', pattern_re)
    return pattern_re

# glob_to_re ()
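
# For illustration (the exact regex text depends on what the running
# Python's 'fnmatch.translate()' produces): glob_to_re('*.py') yields
# something like r'[^/]*\.py$', so the '*' part cannot match a slash,
# whereas fnmatch alone would produce roughly r'.*\.py$'.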

def translate_pattern (pattern, anchor=1, prefix=None):
    """Translate a shell-like wildcard pattern to a compiled regular
    expression.  Return the compiled regex.
    """
    if pattern:
        pattern_re = glob_to_re (pattern)
    else:
        pattern_re = ''

    if prefix is not None:
        prefix_re = (glob_to_re (prefix))[0:-1] # ditch trailing $
        pattern_re = "^" + os.path.join (prefix_re, ".*" + pattern_re)
    else:                               # no prefix -- respect anchor flag
        if anchor:
            pattern_re = "^" + pattern_re

    return re.compile (pattern_re)

# translate_pattern ()
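
# For illustration: translate_pattern('*.py', anchor=1) compiles roughly
# r'^[^/]*\.py$', while translate_pattern('*.py', prefix='lib') compiles
# roughly r'^lib/.*[^/]*\.py$' (on Unix) -- which is how 'recursive-include'
# and 'graft' above restrict matches to a directory subtree.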