Initial import of setuptools, with integrated tests.

Phillip J. Eby 2006-04-18 04:05:34 +00:00
parent e247e89846
commit 069159b113
39 changed files with 10376 additions and 0 deletions

Lib/easy_install.py Normal file (+6 lines)

@@ -0,0 +1,6 @@
"""Run the EasyInstall command"""
if __name__ == '__main__':
from setuptools.command.easy_install import main
main()

Lib/pkg_resources.py Normal file (+2378 lines)

File diff suppressed because it is too large.

@@ -0,0 +1,89 @@
Metadata-Version: 1.0
Name: setuptools
Version: 0.7a1dev-r45519
Summary: Download, build, install, upgrade, and uninstall Python packages -- easily!
Home-page: http://peak.telecommunity.com/DevCenter/setuptools
Author: Phillip J. Eby
Author-email: peak@eby-sarna.com
License: PSF or ZPL
Description: ``setuptools`` is a collection of enhancements to the Python ``distutils``
(for Python 2.3.5 and up on most platforms; 64-bit platforms require a minimum
of Python 2.4) that allow you to more easily build and distribute Python
packages, especially ones that have dependencies on other packages.
Packages built and distributed using ``setuptools`` look to the user like
ordinary Python packages based on the ``distutils``. Your users don't need to
install or even know about setuptools in order to use them, and you don't
have to include the entire setuptools package in your distributions. By
including just a single `bootstrap module`_ (an 8K .py file), your package will
automatically download and install ``setuptools`` if the user is building your
package from source and doesn't have a suitable version already installed.
.. _bootstrap module: http://peak.telecommunity.com/dist/ez_setup.py
Feature Highlights:
* Automatically find/download/install/upgrade dependencies at build time using
the `EasyInstall tool <http://peak.telecommunity.com/DevCenter/EasyInstall>`_,
which supports downloading via HTTP, FTP, Subversion, and SourceForge, and
automatically scans web pages linked from PyPI to find download links. (It's
the closest thing to CPAN currently available for Python.)
* Create `Python Eggs <http://peak.telecommunity.com/DevCenter/PythonEggs>`_ -
a single-file importable distribution format
* Include data files inside your package directories, where your code can
actually use them. (Python 2.4 distutils also supports this feature, but
setuptools provides the feature for Python 2.3 packages also, and supports
accessing data files in zipped packages too.)
* Automatically include all packages in your source tree, without listing them
individually in setup.py
* Automatically include all relevant files in your source distributions,
without needing to create a ``MANIFEST.in`` file, and without having to force
regeneration of the ``MANIFEST`` file when your source tree changes.
* Automatically generate wrapper scripts or Windows (console and GUI) .exe
files for any number of "main" functions in your project. (Note: this is not
a py2exe replacement; the .exe files rely on the local Python installation.)
* Transparent Pyrex support, so that your setup.py can list ``.pyx`` files and
still work even when the end-user doesn't have Pyrex installed (as long as
you include the Pyrex-generated C in your source distribution)
* Command aliases - create project-specific, per-user, or site-wide shortcut
names for commonly used commands and options
* PyPI upload support - upload your source distributions and eggs to PyPI
* Deploy your project in "development mode", such that it's available on
``sys.path``, yet can still be edited directly from its source checkout.
* Easily extend the distutils with new commands or ``setup()`` arguments, and
distribute/reuse your extensions for multiple projects, without copying code.
* Create extensible applications and frameworks that automatically discover
extensions, using simple "entry points" declared in a project's setup script.
In addition to the PyPI downloads, the development version of ``setuptools``
is available from the `Python SVN sandbox`_, and in-development versions of the
`0.6 branch`_ are available as well.
.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
.. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev
Keywords: CPAN PyPI distutils eggs package management
Platform: UNKNOWN
Classifier: Development Status :: 3 - Alpha
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Python Software Foundation License
Classifier: License :: OSI Approved :: Zope Public License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: System :: Archiving :: Packaging
Classifier: Topic :: System :: Systems Administration
Classifier: Topic :: Utilities
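
The description above leans on several features at once: the ez_setup bootstrap module, automatic package discovery, dependency declarations, and entry-point scripts. As a rough illustration, a minimal setup.py for a hypothetical project might look like the sketch below; the project name, version, packages, and dependency are all invented, and it assumes ez_setup.py has been copied next to setup.py as the bootstrap-module paragraph suggests.

# A minimal sketch of a setup.py using the features described above.  The
# project name, version, packages, and dependency are invented; only the
# setuptools/ez_setup APIs themselves are real.
from ez_setup import use_setuptools
use_setuptools()                       # bootstrap setuptools if it is missing

from setuptools import setup, find_packages

setup(
    name="ExampleProject",                         # hypothetical project
    version="0.1",
    packages=find_packages(exclude=["tests", "tests.*"]),
    install_requires=["SomeDependency>=1.0"],      # hypothetical dependency
    include_package_data=True,
    entry_points={
        "console_scripts": ["example-tool = exampleproject.main:main"],
    },
)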

@@ -0,0 +1,51 @@
[distutils.setup_keywords]
dependency_links = setuptools.dist:assert_string_list
entry_points = setuptools.dist:check_entry_points
extras_require = setuptools.dist:check_extras
package_data = setuptools.dist:check_package_data
install_requires = setuptools.dist:check_requirements
include_package_data = setuptools.dist:assert_bool
exclude_package_data = setuptools.dist:check_package_data
namespace_packages = setuptools.dist:check_nsp
test_suite = setuptools.dist:check_test_suite
eager_resources = setuptools.dist:assert_string_list
zip_safe = setuptools.dist:assert_bool
test_loader = setuptools.dist:check_importable
tests_require = setuptools.dist:check_requirements
[setuptools.file_finders]
svn_cvs = setuptools.command.sdist:_default_revctrl
[egg_info.writers]
dependency_links.txt = setuptools.command.egg_info:overwrite_arg
requires.txt = setuptools.command.egg_info:write_requirements
PKG-INFO = setuptools.command.egg_info:write_pkg_info
eager_resources.txt = setuptools.command.egg_info:overwrite_arg
top_level.txt = setuptools.command.egg_info:write_toplevel_names
namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
entry_points.txt = setuptools.command.egg_info:write_entries
depends.txt = setuptools.command.egg_info:warn_depends_obsolete
[console_scripts]
easy_install = setuptools.command.easy_install:main
easy_install-2.5 = setuptools.command.easy_install:main
[distutils.commands]
bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
rotate = setuptools.command.rotate:rotate
develop = setuptools.command.develop:develop
setopt = setuptools.command.setopt:setopt
build_py = setuptools.command.build_py:build_py
saveopts = setuptools.command.saveopts:saveopts
egg_info = setuptools.command.egg_info:egg_info
install_egg_info = setuptools.command.install_egg_info:install_egg_info
alias = setuptools.command.alias:alias
easy_install = setuptools.command.easy_install:easy_install
install_scripts = setuptools.command.install_scripts:install_scripts
bdist_egg = setuptools.command.bdist_egg:bdist_egg
install = setuptools.command.install:install
test = setuptools.command.test:test
install_lib = setuptools.command.install_lib:install_lib
build_ext = setuptools.command.build_ext:build_ext
sdist = setuptools.command.sdist:sdist
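
The groups above are plain entry-point metadata; nothing happens until something iterates a group by name. A small sketch of how such a group can be enumerated with pkg_resources (added in this same commit) follows; what it prints depends on the installed projects, so treat it as illustrative only.

# Sketch: enumerating one of the groups above with pkg_resources.
from pkg_resources import iter_entry_points

for ep in iter_entry_points('distutils.commands'):
    cmd_class = ep.load()     # e.g. imports setuptools.command.sdist:sdist
    print('%-15s -> %s.%s' % (ep.name, cmd_class.__module__, cmd_class.__name__))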

@@ -0,0 +1,3 @@
easy_install
pkg_resources
setuptools

@@ -0,0 +1,82 @@
"""Extensions to the 'distutils' for large or complex distributions"""
from setuptools.extension import Extension, Library
from setuptools.dist import Distribution, Feature, _get_unpatched
import distutils.core, setuptools.command
from setuptools.depends import Require
from distutils.core import Command as _Command
from distutils.util import convert_path
import os.path
__version__ = '0.7a1'
__all__ = [
'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
'find_packages'
]
bootstrap_install_from = None
def find_packages(where='.', exclude=()):
"""Return a list all Python packages found within directory 'where'
'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
will be converted to the appropriate local path syntax. 'exclude' is a
sequence of package names to exclude; '*' can be used as a wildcard in the
names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
'foo' itself).
"""
out = []
stack=[(convert_path(where), '')]
while stack:
where,prefix = stack.pop(0)
for name in os.listdir(where):
fn = os.path.join(where,name)
if (os.path.isdir(fn) and
os.path.isfile(os.path.join(fn,'__init__.py'))
):
out.append(prefix+name); stack.append((fn,prefix+name+'.'))
for pat in exclude:
from fnmatch import fnmatchcase
out = [item for item in out if not fnmatchcase(item,pat)]
return out
setup = distutils.core.setup
_Command = _get_unpatched(_Command)
class Command(_Command):
__doc__ = _Command.__doc__
command_consumes_arguments = False
def __init__(self, dist, **kw):
# Add support for keyword arguments
_Command.__init__(self,dist)
for k,v in kw.items():
setattr(self,k,v)
def reinitialize_command(self, command, reinit_subcommands=0, **kw):
cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
for k,v in kw.items():
setattr(cmd,k,v) # update command with keywords
return cmd
import distutils.core
distutils.core.Command = Command # we can't patch distutils.cmd, alas
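
find_packages() above only treats a directory as a package if it contains __init__.py, and applies the exclude patterns with fnmatch after the walk. A usage sketch against a hypothetical src/ layout (invented for illustration):

# Hypothetical layout:
#   src/mypkg/__init__.py
#   src/mypkg/util/__init__.py
#   src/mypkg/tests/__init__.py
# (result order follows os.listdir(), so it may vary)
from setuptools import find_packages

print(find_packages('src'))                             # mypkg, mypkg.util, mypkg.tests
print(find_packages('src', exclude=['mypkg.tests']))    # mypkg, mypkg.util
print(find_packages('src', exclude=['mypkg.*']))        # mypkg only; 'mypkg' itself is kept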

Lib/setuptools/archive_util.py Executable file (+205 lines)

@@ -0,0 +1,205 @@
"""Utilities for extracting common archive formats"""
__all__ = [
"unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
"UnrecognizedFormat", "extraction_drivers", "unpack_directory",
]
import zipfile, tarfile, os, shutil
from pkg_resources import ensure_directory
from distutils.errors import DistutilsError
class UnrecognizedFormat(DistutilsError):
"""Couldn't recognize the archive type"""
def default_filter(src,dst):
"""The default progress/filter callback; returns True for all files"""
return dst
def unpack_archive(filename, extract_dir, progress_filter=default_filter,
drivers=None
):
"""Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
`progress_filter` is a function taking two arguments: a source path
internal to the archive ('/'-separated), and a filesystem path where it
will be extracted. The callback must return the desired extract path
(which may be the same as the one passed in), or else ``None`` to skip
that file or directory. The callback can thus be used to report on the
progress of the extraction, as well as to filter the items extracted or
alter their extraction paths.
`drivers`, if supplied, must be a non-empty sequence of functions with the
same signature as this function (minus the `drivers` argument), that raise
``UnrecognizedFormat`` if they do not support extracting the designated
archive type. The `drivers` are tried in sequence until one is found that
does not raise an error, or until all are exhausted (in which case
``UnrecognizedFormat`` is raised). If you do not supply a sequence of
drivers, the module's ``extraction_drivers`` constant will be used, which
means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
order.
"""
for driver in drivers or extraction_drivers:
try:
driver(filename, extract_dir, progress_filter)
except UnrecognizedFormat:
continue
else:
return
else:
raise UnrecognizedFormat(
"Not a recognized archive type: %s" % filename
)
def unpack_directory(filename, extract_dir, progress_filter=default_filter):
""""Unpack" a directory, using the same interface as for archives
Raises ``UnrecognizedFormat`` if `filename` is not a directory
"""
if not os.path.isdir(filename):
raise UnrecognizedFormat("%s is not a directory" % (filename,))
paths = {filename:('',extract_dir)}
for base, dirs, files in os.walk(filename):
src,dst = paths[base]
for d in dirs:
paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d)
for f in files:
name = src+f
target = os.path.join(dst,f)
target = progress_filter(src+f, target)
if not target:
continue # skip non-files
ensure_directory(target)
f = os.path.join(base,f)
shutil.copyfile(f, target)
shutil.copystat(f, target)
def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
"""Unpack zip `filename` to `extract_dir`
Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
of the `progress_filter` argument.
"""
if not zipfile.is_zipfile(filename):
raise UnrecognizedFormat("%s is not a zip file" % (filename,))
z = zipfile.ZipFile(filename)
try:
for info in z.infolist():
name = info.filename
# don't extract absolute paths or ones with .. in them
if name.startswith('/') or '..' in name:
continue
target = os.path.join(extract_dir, *name.split('/'))
target = progress_filter(name, target)
if not target:
continue
if name.endswith('/'):
# directory
ensure_directory(target)
else:
# file
ensure_directory(target)
data = z.read(info.filename)
f = open(target,'wb')
try:
f.write(data)
finally:
f.close()
del data
finally:
z.close()
def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
"""Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
of the `progress_filter` argument.
"""
try:
tarobj = tarfile.open(filename)
except tarfile.TarError:
raise UnrecognizedFormat(
"%s is not a compressed or uncompressed tar file" % (filename,)
)
try:
tarobj.chown = lambda *args: None # don't do any chowning!
for member in tarobj:
if member.isfile() or member.isdir():
name = member.name
# don't extract absolute paths or ones with .. in them
if not name.startswith('/') and '..' not in name:
dst = os.path.join(extract_dir, *name.split('/'))
dst = progress_filter(name, dst)
if dst:
if dst.endswith(os.sep):
dst = dst[:-1]
tarobj._extract_member(member,dst) # XXX Ugh
return True
finally:
tarobj.close()
extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
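
The progress_filter protocol documented in unpack_archive()'s docstring doubles as a way to drop members. A sketch of a filter that reports progress and skips byte-compiled files follows; the archive name and target directory are hypothetical.

# Sketch: a progress_filter that reports each member and skips *.pyc/*.pyo
# files, following the callback protocol described above.
from setuptools.archive_util import unpack_archive

def report_and_filter(src, dst):
    if src.endswith('.pyc') or src.endswith('.pyo'):
        return None                       # None means "skip this member"
    print('extracting %s -> %s' % (src, dst))
    return dst                            # return the (possibly altered) target path

unpack_archive('Example-1.0.egg', 'build/example',
               progress_filter=report_and_filter)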

Lib/setuptools/cli.exe Executable file (binary file, not shown)

@@ -0,0 +1,19 @@
__all__ = [
'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
'sdist', 'setopt', 'test', 'upload', 'install_egg_info', 'install_scripts',
]
import sys
if sys.version>='2.5':
# In Python 2.5 and above, distutils includes its own upload command
__all__.remove('upload')
from distutils.command.bdist import bdist
if 'egg' not in bdist.format_commands:
bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
bdist.format_commands.append('egg')
del bdist, sys

Lib/setuptools/command/alias.py Executable file (+82 lines)

@@ -0,0 +1,82 @@
import distutils, os
from setuptools import Command
from distutils.util import convert_path
from distutils import log
from distutils.errors import *
from setuptools.command.setopt import edit_config, option_base, config_file
def shquote(arg):
"""Quote an argument for later parsing by shlex.split()"""
for c in '"', "'", "\\", "#":
if c in arg: return repr(arg)
if arg.split()<>[arg]:
return repr(arg)
return arg
class alias(option_base):
"""Define a shortcut that invokes one or more commands"""
description = "define a shortcut to invoke one or more commands"
command_consumes_arguments = True
user_options = [
('remove', 'r', 'remove (unset) the alias'),
] + option_base.user_options
boolean_options = option_base.boolean_options + ['remove']
def initialize_options(self):
option_base.initialize_options(self)
self.args = None
self.remove = None
def finalize_options(self):
option_base.finalize_options(self)
if self.remove and len(self.args)<>1:
raise DistutilsOptionError(
"Must specify exactly one argument (the alias name) when "
"using --remove"
)
def run(self):
aliases = self.distribution.get_option_dict('aliases')
if not self.args:
print "Command Aliases"
print "---------------"
for alias in aliases:
print "setup.py alias", format_alias(alias, aliases)
return
elif len(self.args)==1:
alias, = self.args
if self.remove:
command = None
elif alias in aliases:
print "setup.py alias", format_alias(alias, aliases)
return
else:
print "No alias definition found for %r" % alias
return
else:
alias = self.args[0]
command = ' '.join(map(shquote,self.args[1:]))
edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)
def format_alias(name, aliases):
source, command = aliases[name]
if source == config_file('global'):
source = '--global-config '
elif source == config_file('user'):
source = '--user-config '
elif source == config_file('local'):
source = ''
else:
source = '--filename=%r' % source
return source+name+' '+command
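
shquote() above only promises that whatever it returns survives a later shlex.split(). A quick round-trip check, using arbitrary sample arguments and run under the Python 2 interpreter this code targets:

# Round-trip check for shquote(): anything it returns should come back
# unchanged through shlex.split().  Sample arguments are arbitrary.
import shlex
from setuptools.command.alias import shquote

for arg in ['test', '--verbose', 'two words', "it's", 'a#comment']:
    quoted = shquote(arg)
    assert shlex.split(quoted) == [arg], (arg, quoted)
    print('%r -> %s' % (arg, quoted))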

@@ -0,0 +1,451 @@
"""setuptools.command.bdist_egg
Build .egg distributions"""
# This module should be kept compatible with Python 2.3
import sys, os, marshal
from setuptools import Command
from distutils.dir_util import remove_tree, mkpath
from distutils.sysconfig import get_python_version, get_python_lib
from distutils import log
from pkg_resources import get_platform, Distribution
from types import CodeType
from setuptools.extension import Library
def write_stub(resource, pyfile):
f = open(pyfile,'w')
f.write('\n'.join([
"def __bootstrap__():",
" global __bootstrap__, __loader__, __file__",
" import sys, pkg_resources, imp",
" __file__ = pkg_resources.resource_filename(__name__,%r)"
% resource,
" del __bootstrap__, __loader__",
" imp.load_dynamic(__name__,__file__)",
"__bootstrap__()",
"" # terminal \n
]))
f.close()
# stub __init__.py for packages distributed without one
NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)'
class bdist_egg(Command):
description = "create an \"egg\" distribution"
user_options = [
('bdist-dir=', 'b',
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('exclude-source-files', None,
"remove all .py files from the generated egg"),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
]
boolean_options = [
'keep-temp', 'skip-build', 'exclude-source-files'
]
def initialize_options (self):
self.bdist_dir = None
self.plat_name = None
self.keep_temp = 0
self.dist_dir = None
self.skip_build = 0
self.egg_output = None
self.exclude_source_files = None
def finalize_options(self):
ei_cmd = self.get_finalized_command("egg_info")
self.egg_info = ei_cmd.egg_info
if self.bdist_dir is None:
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'egg')
if self.plat_name is None:
self.plat_name = get_platform()
self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
if self.egg_output is None:
# Compute filename of the output egg
basename = Distribution(
None, None, ei_cmd.egg_name, ei_cmd.egg_version,
get_python_version(),
self.distribution.has_ext_modules() and self.plat_name
).egg_name()
self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
def do_install_data(self):
# Hack for packages that install data to install's --install-lib
self.get_finalized_command('install').install_lib = self.bdist_dir
site_packages = os.path.normcase(os.path.realpath(get_python_lib()))
old, self.distribution.data_files = self.distribution.data_files,[]
for item in old:
if isinstance(item,tuple) and len(item)==2:
if os.path.isabs(item[0]):
realpath = os.path.realpath(item[0])
normalized = os.path.normcase(realpath)
if normalized==site_packages or normalized.startswith(
site_packages+os.sep
):
item = realpath[len(site_packages)+1:], item[1]
# XXX else: raise ???
self.distribution.data_files.append(item)
try:
log.info("installing package data to %s" % self.bdist_dir)
self.call_command('install_data', force=0, root=None)
finally:
self.distribution.data_files = old
def get_outputs(self):
return [self.egg_output]
def call_command(self,cmdname,**kw):
"""Invoke reinitialized command `cmdname` with keyword args"""
for dirname in INSTALL_DIRECTORY_ATTRS:
kw.setdefault(dirname,self.bdist_dir)
kw.setdefault('skip_build',self.skip_build)
kw.setdefault('dry_run', self.dry_run)
cmd = self.reinitialize_command(cmdname, **kw)
self.run_command(cmdname)
return cmd
def run(self):
# Generate metadata first
self.run_command("egg_info")
# We run install_lib before install_data, because some data hacks
# pull their data path from the install_lib command.
log.info("installing library code to %s" % self.bdist_dir)
instcmd = self.get_finalized_command('install')
old_root = instcmd.root; instcmd.root = None
cmd = self.call_command('install_lib', warn_dir=0)
instcmd.root = old_root
all_outputs, ext_outputs = self.get_ext_outputs()
self.stubs = []
to_compile = []
for (p,ext_name) in enumerate(ext_outputs):
filename,ext = os.path.splitext(ext_name)
pyfile = os.path.join(self.bdist_dir, filename + '.py')
self.stubs.append(pyfile)
log.info("creating stub loader for %s" % ext_name)
if not self.dry_run:
write_stub(os.path.basename(ext_name), pyfile)
to_compile.append(pyfile)
ext_outputs[p] = ext_name.replace(os.sep,'/')
to_compile.extend(self.make_init_files())
if to_compile:
cmd.byte_compile(to_compile)
if self.distribution.data_files:
self.do_install_data()
# Make the EGG-INFO directory
archive_root = self.bdist_dir
egg_info = os.path.join(archive_root,'EGG-INFO')
self.mkpath(egg_info)
if self.distribution.scripts:
script_dir = os.path.join(egg_info, 'scripts')
log.info("installing scripts to %s" % script_dir)
self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
native_libs = os.path.join(self.egg_info,"native_libs.txt")
if all_outputs:
log.info("writing %s" % native_libs)
if not self.dry_run:
libs_file = open(native_libs, 'wt')
libs_file.write('\n'.join(all_outputs))
libs_file.write('\n')
libs_file.close()
elif os.path.isfile(native_libs):
log.info("removing %s" % native_libs)
if not self.dry_run:
os.unlink(native_libs)
for filename in os.listdir(self.egg_info):
path = os.path.join(self.egg_info,filename)
if os.path.isfile(path):
self.copy_file(path,os.path.join(egg_info,filename))
write_safety_flag(
os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
)
if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
log.warn(
"WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
"Use the install_requires/extras_require setup() args instead."
)
if self.exclude_source_files:
self.zap_pyfiles()
# Make the archive
make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
dry_run=self.dry_run)
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run)
# Add to 'Distribution.dist_files' so that the "upload" command works
getattr(self.distribution,'dist_files',[]).append(
('bdist_egg',get_python_version(),self.egg_output))
def zap_pyfiles(self):
log.info("Removing .py files from temporary directory")
for base,dirs,files in walk_egg(self.bdist_dir):
for name in files:
if name.endswith('.py'):
path = os.path.join(base,name)
log.debug("Deleting %s", path)
os.unlink(path)
def zip_safe(self):
safe = getattr(self.distribution,'zip_safe',None)
if safe is not None:
return safe
log.warn("zip_safe flag not set; analyzing archive contents...")
return analyze_egg(self.bdist_dir, self.stubs)
def make_init_files(self):
"""Create missing package __init__ files"""
init_files = []
for base,dirs,files in walk_egg(self.bdist_dir):
if base==self.bdist_dir:
# don't put an __init__ in the root
continue
for name in files:
if name.endswith('.py'):
if '__init__.py' not in files:
pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.')
if self.distribution.has_contents_for(pkg):
log.warn("Creating missing __init__.py for %s",pkg)
filename = os.path.join(base,'__init__.py')
if not self.dry_run:
f = open(filename,'w'); f.write(NS_PKG_STUB)
f.close()
init_files.append(filename)
break
else:
# not a package, don't traverse to subdirectories
dirs[:] = []
return init_files
def get_ext_outputs(self):
"""Get a list of relative paths to C extensions in the output distro"""
all_outputs = []
ext_outputs = []
paths = {self.bdist_dir:''}
for base, dirs, files in os.walk(self.bdist_dir):
for filename in files:
if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
all_outputs.append(paths[base]+filename)
for filename in dirs:
paths[os.path.join(base,filename)] = paths[base]+filename+'/'
if self.distribution.has_ext_modules():
build_cmd = self.get_finalized_command('build_ext')
for ext in build_cmd.extensions:
if isinstance(ext,Library):
continue
fullname = build_cmd.get_ext_fullname(ext.name)
filename = build_cmd.get_ext_filename(fullname)
if not os.path.basename(filename).startswith('dl-'):
if os.path.exists(os.path.join(self.bdist_dir,filename)):
ext_outputs.append(filename)
return all_outputs, ext_outputs
NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
def walk_egg(egg_dir):
"""Walk an unpacked egg's contents, skipping the metadata directory"""
walker = os.walk(egg_dir)
base,dirs,files = walker.next()
if 'EGG-INFO' in dirs:
dirs.remove('EGG-INFO')
yield base,dirs,files
for bdf in walker:
yield bdf
def analyze_egg(egg_dir, stubs):
# check for existing flag in EGG-INFO
for flag,fn in safety_flags.items():
if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
return flag
safe = True
for base, dirs, files in walk_egg(egg_dir):
for name in files:
if name.endswith('.py') or name.endswith('.pyw'):
continue
elif name.endswith('.pyc') or name.endswith('.pyo'):
# always scan, even if we already know we're not safe
safe = scan_module(egg_dir, base, name, stubs) and safe
return safe
def write_safety_flag(egg_dir, safe):
# Write or remove zip safety flag file(s)
for flag,fn in safety_flags.items():
fn = os.path.join(egg_dir, fn)
if os.path.exists(fn):
if safe is None or bool(safe)<>flag:
os.unlink(fn)
elif safe is not None and bool(safe)==flag:
open(fn,'w').close()
safety_flags = {
True: 'zip-safe',
False: 'not-zip-safe',
}
def scan_module(egg_dir, base, name, stubs):
"""Check whether module possibly uses unsafe-for-zipfile stuff"""
filename = os.path.join(base,name)
if filename[:-1] in stubs:
return True # Extension module
pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
f = open(filename,'rb'); f.read(8) # skip magic & date
code = marshal.load(f); f.close()
safe = True
symbols = dict.fromkeys(iter_symbols(code))
for bad in ['__file__', '__path__']:
if bad in symbols:
log.warn("%s: module references %s", module, bad)
safe = False
if 'inspect' in symbols:
for bad in [
'getsource', 'getabsfile', 'getsourcefile', 'getfile',
'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
'getinnerframes', 'getouterframes', 'stack', 'trace'
]:
if bad in symbols:
log.warn("%s: module MAY be using inspect.%s", module, bad)
safe = False
if '__name__' in symbols and '__main__' in symbols and '.' not in module:
if get_python_version()>="2.4":
log.warn("%s: top-level module may be 'python -m' script", module)
safe = False
return safe
def iter_symbols(code):
"""Yield names and strings used by `code` and its nested code objects"""
for name in code.co_names: yield name
for const in code.co_consts:
if isinstance(const,basestring):
yield const
elif isinstance(const,CodeType):
for name in iter_symbols(const):
yield name
# Attribute names of options for commands that might need to be convinced to
# install to the egg build directory
INSTALL_DIRECTORY_ATTRS = [
'install_lib', 'install_dir', 'install_data', 'install_base'
]
def make_zipfile (zip_filename, base_dir, verbose=0, dry_run=0, compress=None):
"""Create a zip file from all the files under 'base_dir'. The output
zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
Python module (if available) or the InfoZIP "zip" utility (if installed
and found on the default search path). If neither tool is available,
raises DistutilsExecError. Returns the name of the output zip file.
"""
import zipfile
mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
def visit (z, dirname, names):
for name in names:
path = os.path.normpath(os.path.join(dirname, name))
if os.path.isfile(path):
p = path[len(base_dir)+1:]
if not dry_run:
z.write(path, p)
log.debug("adding '%s'" % p)
if compress is None:
compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits
compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
if not dry_run:
z = zipfile.ZipFile(zip_filename, "w", compression=compression)
os.path.walk(base_dir, visit, z)
z.close()
else:
os.path.walk(base_dir, visit, None)
return zip_filename
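
The zip-safety analysis in analyze_egg()/scan_module() boils down to walking the names and string constants of compiled code and flagging references such as __file__ that tend to break inside a zipped egg. The sketch below reproduces that idea on a code object obtained from compile() rather than from a .pyc file, purely to stay self-contained; the sample source is invented.

# Self-contained sketch of the zip-safety heuristic from scan_module() above.
SAMPLE = '''
import os
DATA = open(os.path.join(os.path.dirname(__file__), "data.txt")).read()
'''

def iter_symbols(code):
    for name in code.co_names:
        yield name
    for const in code.co_consts:
        if isinstance(const, str):
            yield const
        elif hasattr(const, 'co_names'):          # nested code object
            for name in iter_symbols(const):
                yield name

symbols = set(iter_symbols(compile(SAMPLE, '<example>', 'exec')))
for bad in ('__file__', '__path__'):
    if bad in symbols:
        print('module references %s -- probably not zip-safe' % bad)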

@@ -0,0 +1,68 @@
# This is just a kludge so that bdist_rpm doesn't guess wrong about the
# distribution name and version, if the egg_info command is going to alter
# them, and another kludge to allow you to build old-style non-egg RPMs
from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
class bdist_rpm(_bdist_rpm):
def initialize_options(self):
_bdist_rpm.initialize_options(self)
self.no_egg = None
def run(self):
self.run_command('egg_info') # ensure distro name is up-to-date
_bdist_rpm.run(self)
def _make_spec_file(self):
version = self.distribution.get_version()
rpmversion = version.replace('-','_')
spec = _bdist_rpm._make_spec_file(self)
line23 = '%define version '+version
line24 = '%define version '+rpmversion
spec = [
line.replace(
"Source0: %{name}-%{version}.tar",
"Source0: %{name}-%{unmangled_version}.tar"
).replace(
"setup.py install ",
"setup.py install --single-version-externally-managed "
).replace(
"%setup",
"%setup -n %{name}-%{unmangled_version}"
).replace(line23,line24)
for line in spec
]
spec.insert(spec.index(line24)+1, "%define unmangled_version "+version)
return spec
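
To see what _make_spec_file()'s replacements actually do, here is the same chain of substitutions applied to a few hand-written sample spec lines; the version strings are invented, and the real input comes from distutils' own spec generator.

# The substitution chain from _make_spec_file() above, on sample lines.
version, rpmversion = '0.7a1-dev', '0.7a1_dev'
sample = [
    '%define version ' + version,
    'Source0: %{name}-%{version}.tar.gz',
    'python setup.py install --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES',
    '%setup',
]
fixed = [
    line.replace('Source0: %{name}-%{version}.tar',
                 'Source0: %{name}-%{unmangled_version}.tar')
        .replace('setup.py install ',
                 'setup.py install --single-version-externally-managed ')
        .replace('%setup', '%setup -n %{name}-%{unmangled_version}')
        .replace('%define version ' + version,
                 '%define version ' + rpmversion)
    for line in sample
]
fixed.insert(fixed.index('%define version ' + rpmversion) + 1,
             '%define unmangled_version ' + version)
print('\n'.join(fixed))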

@@ -0,0 +1,287 @@
from distutils.command.build_ext import build_ext as _du_build_ext
try:
# Attempt to use Pyrex for building extensions, if available
from Pyrex.Distutils.build_ext import build_ext as _build_ext
except ImportError:
_build_ext = _du_build_ext
import os, sys
from distutils.file_util import copy_file
from setuptools.extension import Library
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler, get_config_var
get_config_var("LDSHARED") # make sure _config_vars is initialized
from distutils.sysconfig import _config_vars
from distutils import log
from distutils.errors import *
have_rtld = False
use_stubs = False
libtype = 'shared'
if sys.platform == "darwin":
use_stubs = True
elif os.name != 'nt':
try:
from dl import RTLD_NOW
have_rtld = True
use_stubs = True
except ImportError:
pass
def if_dl(s):
if have_rtld:
return s
return ''
class build_ext(_build_ext):
def run(self):
"""Build extensions in build directory, then copy if --inplace"""
old_inplace, self.inplace = self.inplace, 0
_build_ext.run(self)
self.inplace = old_inplace
if old_inplace:
self.copy_extensions_to_source()
def copy_extensions_to_source(self):
build_py = self.get_finalized_command('build_py')
for ext in self.extensions:
fullname = self.get_ext_fullname(ext.name)
filename = self.get_ext_filename(fullname)
modpath = fullname.split('.')
package = '.'.join(modpath[:-1])
package_dir = build_py.get_package_dir(package)
dest_filename = os.path.join(package_dir,os.path.basename(filename))
src_filename = os.path.join(self.build_lib,filename)
# Always copy, even if source is older than destination, to ensure
# that the right extensions for the current Python/platform are
# used.
copy_file(
src_filename, dest_filename, verbose=self.verbose,
dry_run=self.dry_run
)
if ext._needs_stub:
self.write_stub(package_dir or os.curdir, ext, True)
if _build_ext is not _du_build_ext:
# Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
def swig_sources(self, sources, *otherargs):
# first do any Pyrex processing
sources = _build_ext.swig_sources(self, sources) or sources
# Then do any actual SWIG stuff on the remainder
return _du_build_ext.swig_sources(self, sources, *otherargs)
def get_ext_filename(self, fullname):
filename = _build_ext.get_ext_filename(self,fullname)
ext = self.ext_map[fullname]
if isinstance(ext,Library):
fn, ext = os.path.splitext(filename)
return self.shlib_compiler.library_filename(fn,libtype)
elif use_stubs and ext._links_to_dynamic:
d,fn = os.path.split(filename)
return os.path.join(d,'dl-'+fn)
else:
return filename
def initialize_options(self):
_build_ext.initialize_options(self)
self.shlib_compiler = None
self.shlibs = []
self.ext_map = {}
def finalize_options(self):
_build_ext.finalize_options(self)
self.extensions = self.extensions or []
self.check_extensions_list(self.extensions)
self.shlibs = [ext for ext in self.extensions
if isinstance(ext,Library)]
if self.shlibs:
self.setup_shlib_compiler()
for ext in self.extensions:
fullname = ext._full_name = self.get_ext_fullname(ext.name)
self.ext_map[fullname] = ext
ltd = ext._links_to_dynamic = \
self.shlibs and self.links_to_dynamic(ext) or False
ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
filename = ext._file_name = self.get_ext_filename(fullname)
libdir = os.path.dirname(os.path.join(self.build_lib,filename))
if ltd and libdir not in ext.library_dirs:
ext.library_dirs.append(libdir)
if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
ext.runtime_library_dirs.append(os.curdir)
def setup_shlib_compiler(self):
compiler = self.shlib_compiler = new_compiler(
compiler=self.compiler, dry_run=self.dry_run, force=self.force
)
if sys.platform == "darwin":
tmp = _config_vars.copy()
try:
# XXX Help! I don't have any idea whether these are right...
_config_vars['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
_config_vars['CCSHARED'] = " -dynamiclib"
_config_vars['SO'] = ".dylib"
customize_compiler(compiler)
finally:
_config_vars.clear()
_config_vars.update(tmp)
else:
customize_compiler(compiler)
if self.include_dirs is not None:
compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for (name,value) in self.define:
compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:
compiler.undefine_macro(macro)
if self.libraries is not None:
compiler.set_libraries(self.libraries)
if self.library_dirs is not None:
compiler.set_library_dirs(self.library_dirs)
if self.rpath is not None:
compiler.set_runtime_library_dirs(self.rpath)
if self.link_objects is not None:
compiler.set_link_objects(self.link_objects)
# hack so distutils' build_extension() builds a library instead
compiler.link_shared_object = link_shared_object.__get__(compiler)
def get_export_symbols(self, ext):
if isinstance(ext,Library):
return ext.export_symbols
return _build_ext.get_export_symbols(self,ext)
def build_extension(self, ext):
_compiler = self.compiler
try:
if isinstance(ext,Library):
self.compiler = self.shlib_compiler
_build_ext.build_extension(self,ext)
if ext._needs_stub:
self.write_stub(
self.get_finalized_command('build_py').build_lib, ext
)
finally:
self.compiler = _compiler
def links_to_dynamic(self, ext):
"""Return true if 'ext' links to a dynamic lib in the same package"""
# XXX this should check to ensure the lib is actually being built
# XXX as dynamic, and not just using a locally-found version or a
# XXX static-compiled version
libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
for libname in ext.libraries:
if pkg+libname in libnames: return True
return False
def get_outputs(self):
outputs = _build_ext.get_outputs(self)
optimize = self.get_finalized_command('build_py').optimize
for ext in self.extensions:
if ext._needs_stub:
base = os.path.join(self.build_lib, *ext._full_name.split('.'))
outputs.append(base+'.py')
outputs.append(base+'.pyc')
if optimize:
outputs.append(base+'.pyo')
return outputs
def write_stub(self, output_dir, ext, compile=False):
log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
if compile and os.path.exists(stub_file):
raise DistutilsError(stub_file+" already exists! Please delete.")
if not self.dry_run:
f = open(stub_file,'w')
f.write('\n'.join([
"def __bootstrap__():",
" global __bootstrap__, __file__, __loader__",
" import sys, os, pkg_resources, imp"+if_dl(", dl"),
" __file__ = pkg_resources.resource_filename(__name__,%r)"
% os.path.basename(ext._file_name),
" del __bootstrap__",
" if '__loader__' in globals():",
" del __loader__",
if_dl(" old_flags = sys.getdlopenflags()"),
" old_dir = os.getcwd()",
" try:",
" os.chdir(os.path.dirname(__file__))",
if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
" imp.load_dynamic(__name__,__file__)",
" finally:",
if_dl(" sys.setdlopenflags(old_flags)"),
" os.chdir(old_dir)",
"__bootstrap__()",
"" # terminal \n
]))
f.close()
if compile:
from distutils.util import byte_compile
byte_compile([stub_file], optimize=0,
force=True, dry_run=self.dry_run)
optimize = self.get_finalized_command('install_lib').optimize
if optimize > 0:
byte_compile([stub_file], optimize=optimize,
force=True, dry_run=self.dry_run)
if os.path.exists(stub_file) and not self.dry_run:
os.unlink(stub_file)
if use_stubs or os.name=='nt':
# Build shared libraries
#
def link_shared_object(self, objects, output_libname, output_dir=None,
libraries=None, library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None
): self.link(
self.SHARED_LIBRARY, objects, output_libname,
output_dir, libraries, library_dirs, runtime_library_dirs,
export_symbols, debug, extra_preargs, extra_postargs,
build_temp, target_lang
)
else:
# Build static libraries everywhere else
libtype = 'static'
def link_shared_object(self, objects, output_libname, output_dir=None,
libraries=None, library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None
):
# XXX we need to either disallow these attrs on Library instances,
# or warn/abort here if set, or something...
#libraries=None, library_dirs=None, runtime_library_dirs=None,
#export_symbols=None, extra_preargs=None, extra_postargs=None,
#build_temp=None
assert output_dir is None # distutils build_ext doesn't pass this
output_dir,filename = os.path.split(output_libname)
basename, ext = os.path.splitext(filename)
if self.library_filename("x").startswith('lib'):
# strip 'lib' prefix; this is kludgy if some platform uses
# a different prefix
basename = basename[3:]
self.create_static_lib(
objects, basename, output_dir, debug, target_lang
)
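
setup_shlib_compiler() above ends with compiler.link_shared_object = link_shared_object.__get__(compiler), which grafts a plain function onto one compiler instance as a bound method. A minimal sketch of that descriptor trick, with invented class and function names:

# func.__get__(obj) turns a plain function into a method bound to one
# particular instance, so only that instance's behaviour changes.
class Compiler(object):
    def link_shared_object(self, name):
        return 'default link of %s' % name

def link_as_static_lib(self, name):
    return 'per-instance override for %s' % name

c1, c2 = Compiler(), Compiler()
c1.link_shared_object = link_as_static_lib.__get__(c1)   # bind to c1 only

print(c1.link_shared_object('libfoo'))   # uses the injected method
print(c2.link_shared_object('libfoo'))   # other instances are untouched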

@@ -0,0 +1,205 @@
import os.path, sys, fnmatch
from distutils.command.build_py import build_py as _build_py
from distutils.util import convert_path
from glob import glob
class build_py(_build_py):
"""Enhanced 'build_py' command that includes data files with packages
The data files are specified via a 'package_data' argument to 'setup()'.
See 'setuptools.dist.Distribution' for more details.
Also, this version of the 'build_py' command allows you to specify both
'py_modules' and 'packages' in the same setup operation.
"""
def finalize_options(self):
_build_py.finalize_options(self)
self.package_data = self.distribution.package_data
self.exclude_package_data = self.distribution.exclude_package_data or {}
if 'data_files' in self.__dict__: del self.__dict__['data_files']
def run(self):
"""Build modules, packages, and copy data files to build directory"""
if not self.py_modules and not self.packages:
return
if self.py_modules:
self.build_modules()
if self.packages:
self.build_packages()
self.build_package_data()
# Only compile actual .py files, using our base class' idea of what our
# output files are.
self.byte_compile(_build_py.get_outputs(self, include_bytecode=0))
def __getattr__(self,attr):
if attr=='data_files': # lazily compute data files
self.data_files = files = self._get_data_files(); return files
return _build_py.__getattr__(self,attr)
def _get_data_files(self):
"""Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
self.analyze_manifest()
data = []
for package in self.packages or ():
# Locate package source directory
src_dir = self.get_package_dir(package)
# Compute package build directory
build_dir = os.path.join(*([self.build_lib] + package.split('.')))
# Length of path to strip from found files
plen = len(src_dir)+1
# Strip directory from globbed filenames
filenames = [
file[plen:] for file in self.find_data_files(package, src_dir)
]
data.append( (package, src_dir, build_dir, filenames) )
return data
def find_data_files(self, package, src_dir):
"""Return filenames for package's data files in 'src_dir'"""
globs = (self.package_data.get('', [])
+ self.package_data.get(package, []))
files = self.manifest_files.get(package, [])[:]
for pattern in globs:
# Each pattern has to be converted to a platform-specific path
files.extend(glob(os.path.join(src_dir, convert_path(pattern))))
return self.exclude_data_files(package, src_dir, files)
def build_package_data(self):
"""Copy data files into build directory"""
lastdir = None
for package, src_dir, build_dir, filenames in self.data_files:
for filename in filenames:
target = os.path.join(build_dir, filename)
self.mkpath(os.path.dirname(target))
self.copy_file(os.path.join(src_dir, filename), target)
def analyze_manifest(self):
self.manifest_files = mf = {}
if not self.distribution.include_package_data:
return
src_dirs = {}
for package in self.packages or ():
# Locate package source directory
src_dirs[assert_relative(self.get_package_dir(package))] = package
self.run_command('egg_info')
ei_cmd = self.get_finalized_command('egg_info')
for path in ei_cmd.filelist.files:
if path.endswith('.py'):
continue
d,f = os.path.split(assert_relative(path))
prev = None
while d and d!=prev and d not in src_dirs:
prev = d
d, df = os.path.split(d)
f = os.path.join(df, f)
if d in src_dirs:
mf.setdefault(src_dirs[d],[]).append(path)
def get_data_files(self): pass # kludge 2.4 for lazy computation
if sys.version<"2.4": # Python 2.4 already has this code
def get_outputs(self, include_bytecode=1):
"""Return complete list of files copied to the build directory
This includes both '.py' files and data files, as well as '.pyc'
and '.pyo' files if 'include_bytecode' is true. (This method is
needed for the 'install_lib' command to do its job properly, and to
generate a correct installation manifest.)
"""
return _build_py.get_outputs(self, include_bytecode) + [
os.path.join(build_dir, filename)
for package, src_dir, build_dir,filenames in self.data_files
for filename in filenames
]
def check_package(self, package, package_dir):
"""Check namespace packages' __init__ for declare_namespace"""
try:
return self.packages_checked[package]
except KeyError:
pass
init_py = _build_py.check_package(self, package, package_dir)
self.packages_checked[package] = init_py
if not init_py or not self.distribution.namespace_packages:
return init_py
for pkg in self.distribution.namespace_packages:
if pkg==package or pkg.startswith(package+'.'):
break
else:
return init_py
f = open(init_py,'rU')
if 'declare_namespace' not in f.read():
from distutils.errors import DistutilsError
raise DistutilsError(
"Namespace package problem: %s is a namespace package, but its\n"
"__init__.py does not call declare_namespace()! Please fix it.\n"
'(See the setuptools manual under "Namespace Packages" for '
"details.)\n" % (package,)
)
f.close()
return init_py
def initialize_options(self):
self.packages_checked={}
_build_py.initialize_options(self)
def exclude_data_files(self, package, src_dir, files):
"""Filter filenames for package's data files in 'src_dir'"""
globs = (self.exclude_package_data.get('', [])
+ self.exclude_package_data.get(package, []))
bad = []
for pattern in globs:
bad.extend(
fnmatch.filter(
files, os.path.join(src_dir, convert_path(pattern))
)
)
bad = dict.fromkeys(bad)
return [f for f in files if f not in bad]
def assert_relative(path):
if not os.path.isabs(path):
return path
from distutils.errors import DistutilsSetupError
raise DistutilsSetupError(
"""Error: setup script specifies an absolute path:
%s
setup() arguments must *always* be /-separated paths relative to the
setup.py directory, *never* absolute paths.
""" % path
)
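
build_py.__getattr__() above computes data_files lazily: the first read stores the result on the instance, and since __getattr__ is only consulted for attributes that are missing, later reads never reach it. A small sketch of that pattern with invented names:

# Lazy-attribute pattern: compute once, cache in the instance dict.
class LazyDataFiles(object):
    def _get_data_files(self):
        print('computing data_files ...')
        return ['pkg/data.txt']

    def __getattr__(self, attr):
        if attr == 'data_files':
            self.data_files = files = self._get_data_files()
            return files
        raise AttributeError(attr)

obj = LazyDataFiles()
print(obj.data_files)   # triggers the computation
print(obj.data_files)   # served straight from the instance dict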

Lib/setuptools/command/develop.py Executable file (+123 lines)

@@ -0,0 +1,123 @@
from setuptools.command.easy_install import easy_install
from distutils.util import convert_path
from pkg_resources import Distribution, PathMetadata, normalize_path
from distutils import log
from distutils.errors import *
import sys, os
class develop(easy_install):
"""Set up package for development"""
description = "install package in 'development mode'"
user_options = easy_install.user_options + [
("uninstall", "u", "Uninstall this source package"),
]
boolean_options = easy_install.boolean_options + ['uninstall']
command_consumes_arguments = False # override base
def run(self):
if self.uninstall:
self.multi_version = True
self.uninstall_link()
else:
self.install_for_development()
self.warn_deprecated_options()
def initialize_options(self):
self.uninstall = None
easy_install.initialize_options(self)
def finalize_options(self):
ei = self.get_finalized_command("egg_info")
if ei.broken_egg_info:
raise DistutilsError(
"Please rename %r to %r before using 'develop'"
% (ei.egg_info, ei.broken_egg_info)
)
self.args = [ei.egg_name]
easy_install.finalize_options(self)
self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link')
self.egg_base = ei.egg_base
self.egg_path = os.path.abspath(ei.egg_base)
# Make a distribution for the package's source
self.dist = Distribution(
normalize_path(self.egg_path),
PathMetadata(self.egg_path, os.path.abspath(ei.egg_info)),
project_name = ei.egg_name
)
def install_for_development(self):
# Ensure metadata is up-to-date
self.run_command('egg_info')
# Build extensions in-place
self.reinitialize_command('build_ext', inplace=1)
self.run_command('build_ext')
self.install_site_py() # ensure that target dir is site-safe
# create an .egg-link in the installation dir, pointing to our egg
log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
if not self.dry_run:
f = open(self.egg_link,"w")
f.write(self.egg_path)
f.close()
# postprocess the installed distro, fixing up .pth, installing scripts,
# and handling requirements
self.process_distribution(None, self.dist)
def uninstall_link(self):
if os.path.exists(self.egg_link):
log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
contents = [line.rstrip() for line in file(self.egg_link)]
if contents != [self.egg_path]:
log.warn("Link points to %s: uninstall aborted", contents)
return
if not self.dry_run:
os.unlink(self.egg_link)
if not self.dry_run:
self.update_pth(self.dist) # remove any .pth link to us
if self.distribution.scripts:
# XXX should also check for entry point scripts!
log.warn("Note: you must uninstall or replace scripts manually!")
def install_egg_scripts(self, dist):
if dist is not self.dist:
# Installing a dependency, so fall back to normal behavior
return easy_install.install_egg_scripts(self,dist)
# create wrapper scripts in the script dir, pointing to dist.scripts
# new-style...
self.install_wrapper_scripts(dist)
# ...and old-style
for script_name in self.distribution.scripts or []:
script_path = os.path.abspath(convert_path(script_name))
script_name = os.path.basename(script_path)
f = open(script_path,'rU')
script_text = f.read()
f.close()
self.install_script(dist, script_name, script_text, script_path)
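
install_for_development() above records the source checkout by writing a one-line .egg-link file into the installation directory, and uninstall_link() checks that the line still matches before removing it. A self-contained sketch of that file format, using a temporary directory as a stand-in for site-packages and the current directory as a stand-in for the checkout:

# Sketch of the .egg-link format used by 'develop' above.
import os, tempfile

install_dir = tempfile.mkdtemp()               # stand-in for site-packages
source_dir = os.path.abspath(os.curdir)        # stand-in for the source checkout
egg_link = os.path.join(install_dir, 'ExampleProject.egg-link')

f = open(egg_link, 'w'); f.write(source_dir); f.close()     # what develop writes

f = open(egg_link); linked = f.readline().rstrip(); f.close()
print('%s -> %s' % (egg_link, linked))         # what a consumer reads back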

File diff suppressed because it is too large.

@@ -0,0 +1,369 @@
"""setuptools.command.egg_info
Create a distribution's .egg-info directory and contents"""
# This module should be kept compatible with Python 2.3
import os, re
from setuptools import Command
from distutils.errors import *
from distutils import log
from setuptools.command.sdist import sdist
from distutils import file_util
from distutils.util import convert_path
from distutils.filelist import FileList
from pkg_resources import parse_requirements, safe_name, parse_version, \
safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename
from sdist import walk_revctrl
class egg_info(Command):
description = "create a distribution's .egg-info directory"
user_options = [
('egg-base=', 'e', "directory containing .egg-info directories"
" (default: top of the source tree)"),
('tag-svn-revision', 'r',
"Add subversion revision ID to version number"),
('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
('tag-build=', 'b', "Specify explicit tag to add to version number"),
]
boolean_options = ['tag-date','tag-svn-revision']
def initialize_options (self):
self.egg_name = None
self.egg_version = None
self.egg_base = None
self.egg_info = None
self.tag_build = None
self.tag_svn_revision = 0
self.tag_date = 0
self.broken_egg_info = False
def finalize_options (self):
self.egg_name = safe_name(self.distribution.get_name())
self.egg_version = self.tagged_version()
try:
list(
parse_requirements('%s==%s' % (self.egg_name,self.egg_version))
)
except ValueError:
raise DistutilsOptionError(
"Invalid distribution name or version syntax: %s-%s" %
(self.egg_name,self.egg_version)
)
if self.egg_base is None:
dirs = self.distribution.package_dir
self.egg_base = (dirs or {}).get('',os.curdir)
self.ensure_dirname('egg_base')
self.egg_info = to_filename(self.egg_name)+'.egg-info'
if self.egg_base != os.curdir:
self.egg_info = os.path.join(self.egg_base, self.egg_info)
if '-' in self.egg_name: self.check_broken_egg_info()
# Set package version for the benefit of dumber commands
# (e.g. sdist, bdist_wininst, etc.)
#
self.distribution.metadata.version = self.egg_version
# If we bootstrapped around the lack of a PKG-INFO, as might be the
# case in a fresh checkout, make sure that any special tags get added
# to the version info
#
pd = self.distribution._patched_dist
if pd is not None and pd.key==self.egg_name.lower():
pd._version = self.egg_version
pd._parsed_version = parse_version(self.egg_version)
self.distribution._patched_dist = None
def write_or_delete_file(self, what, filename, data, force=False):
"""Write `data` to `filename` or delete if empty
If `data` is non-empty, this routine is the same as ``write_file()``.
If `data` is empty but not ``None``, this is the same as calling
``delete_file(filename)``. If `data` is ``None``, then this is a no-op
unless `filename` exists, in which case a warning is issued about the
orphaned file (if `force` is false), or deleted (if `force` is true).
"""
if data:
self.write_file(what, filename, data)
elif os.path.exists(filename):
if data is None and not force:
log.warn(
"%s not set in setup(), but %s exists", what, filename
)
return
else:
self.delete_file(filename)
def write_file(self, what, filename, data):
"""Write `data` to `filename` (if not a dry run) after announcing it
`what` is used in a log message to identify what is being written
to the file.
"""
log.info("writing %s to %s", what, filename)
if not self.dry_run:
f = open(filename, 'wb')
f.write(data)
f.close()
def delete_file(self, filename):
"""Delete `filename` (if not a dry run) after announcing it"""
log.info("deleting %s", filename)
if not self.dry_run:
os.unlink(filename)
def run(self):
self.mkpath(self.egg_info)
installer = self.distribution.fetch_build_egg
for ep in iter_entry_points('egg_info.writers'):
writer = ep.load(installer=installer)
writer(self, ep.name, os.path.join(self.egg_info,ep.name))
self.find_sources()
def tagged_version(self):
version = self.distribution.get_version()
if self.tag_build:
version+=self.tag_build
if self.tag_svn_revision and (
os.path.exists('.svn') or os.path.exists('PKG-INFO')
): version += '-r%s' % self.get_svn_revision()
if self.tag_date:
import time; version += time.strftime("-%Y%m%d")
return safe_version(version)
def get_svn_revision(self):
revision = 0
urlre = re.compile('url="([^"]+)"')
revre = re.compile('committed-rev="(\d+)"')
for base,dirs,files in os.walk(os.curdir):
if '.svn' not in dirs:
dirs[:] = []
continue # no sense walking uncontrolled subdirs
dirs.remove('.svn')
f = open(os.path.join(base,'.svn','entries'))
data = f.read()
f.close()
dirurl = urlre.search(data).group(1) # get repository URL
if base==os.curdir:
base_url = dirurl+'/' # save the root url
elif not dirurl.startswith(base_url):
dirs[:] = []
continue # not part of the same svn tree, skip it
for match in revre.finditer(data):
revision = max(revision, int(match.group(1)))
return str(revision or get_pkg_info_revision())
def find_sources(self):
"""Generate SOURCES.txt manifest file"""
manifest_filename = os.path.join(self.egg_info,"SOURCES.txt")
mm = manifest_maker(self.distribution)
mm.manifest = manifest_filename
mm.run()
self.filelist = mm.filelist
def check_broken_egg_info(self):
bei = self.egg_name+'.egg-info'
if self.egg_base != os.curdir:
bei = os.path.join(self.egg_base, bei)
if os.path.exists(bei):
log.warn(
"-"*78+'\n'
"Note: Your current .egg-info directory has a '-' in its name;"
'\nthis will not work correctly with "setup.py develop".\n\n'
'Please rename %s to %s to correct this problem.\n'+'-'*78,
bei, self.egg_info
)
self.broken_egg_info = self.egg_info
self.egg_info = bei # make it work for now
class FileList(FileList):
"""File list that accepts only existing, platform-independent paths"""
def append(self, item):
path = convert_path(item)
if os.path.exists(path):
self.files.append(path)
class manifest_maker(sdist):
template = "MANIFEST.in"
def initialize_options (self):
self.use_defaults = 1
self.prune = 1
self.manifest_only = 1
self.force_manifest = 1
def finalize_options(self):
pass
def run(self):
self.filelist = FileList()
if not os.path.exists(self.manifest):
self.write_manifest() # it must exist so it'll get in the list
self.filelist.findall()
self.add_defaults()
if os.path.exists(self.template):
self.read_template()
self.prune_file_list()
self.filelist.sort()
self.filelist.remove_duplicates()
self.write_manifest()
def write_manifest (self):
"""Write the file list in 'self.filelist' (presumably as filled in
by 'add_defaults()' and 'read_template()') to the manifest file
named by 'self.manifest'.
"""
files = self.filelist.files
if os.sep!='/':
files = [f.replace(os.sep,'/') for f in files]
self.execute(file_util.write_file, (self.manifest, files),
"writing manifest file '%s'" % self.manifest)
def add_defaults(self):
sdist.add_defaults(self)
self.filelist.append(self.template)
self.filelist.append(self.manifest)
rcfiles = list(walk_revctrl())
if rcfiles:
self.filelist.extend(rcfiles)
elif os.path.exists(self.manifest):
self.read_manifest()
ei_cmd = self.get_finalized_command('egg_info')
self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
def prune_file_list (self):
build = self.get_finalized_command('build')
base_dir = self.distribution.get_fullname()
self.filelist.exclude_pattern(None, prefix=build.build_base)
self.filelist.exclude_pattern(None, prefix=base_dir)
sep = re.escape(os.sep)
self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1)
def write_pkg_info(cmd, basename, filename):
log.info("writing %s", filename)
if not cmd.dry_run:
metadata = cmd.distribution.metadata
metadata.version, oldver = cmd.egg_version, metadata.version
metadata.name, oldname = cmd.egg_name, metadata.name
try:
# write unescaped data to PKG-INFO, so older pkg_resources
# can still parse it
metadata.write_pkg_info(cmd.egg_info)
finally:
metadata.name, metadata.version = oldname, oldver
safe = getattr(cmd.distribution,'zip_safe',None)
import bdist_egg; bdist_egg.write_safety_flag(cmd.egg_info, safe)
def warn_depends_obsolete(cmd, basename, filename):
if os.path.exists(filename):
log.warn(
"WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
"Use the install_requires/extras_require setup() args instead."
)
def write_requirements(cmd, basename, filename):
dist = cmd.distribution
data = ['\n'.join(yield_lines(dist.install_requires or ()))]
for extra,reqs in (dist.extras_require or {}).items():
data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs))))
cmd.write_or_delete_file("requirements", filename, ''.join(data))
def write_toplevel_names(cmd, basename, filename):
pkgs = dict.fromkeys(
[k.split('.',1)[0]
for k in cmd.distribution.iter_distribution_names()
]
)
cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n')
def overwrite_arg(cmd, basename, filename):
write_arg(cmd, basename, filename, True)
def write_arg(cmd, basename, filename, force=False):
argname = os.path.splitext(basename)[0]
value = getattr(cmd.distribution, argname, None)
if value is not None:
value = '\n'.join(value)+'\n'
cmd.write_or_delete_file(argname, filename, value, force)
def write_entries(cmd, basename, filename):
ep = cmd.distribution.entry_points
if isinstance(ep,basestring) or ep is None:
data = ep
elif ep is not None:
data = []
for section, contents in ep.items():
if not isinstance(contents,basestring):
contents = EntryPoint.parse_group(section, contents)
contents = '\n'.join(map(str,contents.values()))
data.append('[%s]\n%s\n\n' % (section,contents))
data = ''.join(data)
cmd.write_or_delete_file('entry points', filename, data, True)
def get_pkg_info_revision():
# See if we can get a -r### off of PKG-INFO, in case this is an sdist of
# a subversion revision
#
if os.path.exists('PKG-INFO'):
f = open('PKG-INFO','rU')
for line in f:
match = re.match(r"Version:.*-r(\d+)\s*$", line)
if match:
return int(match.group(1))
return 0
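The writer functions above are not invoked directly; setuptools appears to wire each of them to a metadata filename through the 'egg_info.writers' entry point group, and the egg_info command loops over that group when it fills the .egg-info directory. A minimal sketch of that dispatch, not part of the file above, with the helper name and exact wiring treated as assumptions:
# Sketch only -- assumes the functions above are registered under the
# 'egg_info.writers' entry point group, keyed by the metadata file each
# one maintains (e.g. "requires.txt = ...egg_info:write_requirements").
import os
from pkg_resources import iter_entry_points

def run_writers(cmd):
    for ep in iter_entry_points('egg_info.writers'):
        writer = ep.load()
        # e.g. write_requirements(cmd, 'requires.txt', '<egg_info>/requires.txt')
        writer(cmd, ep.name, os.path.join(cmd.egg_info, ep.name))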


@ -0,0 +1,123 @@
import setuptools, sys
from distutils.command.install import install as _install
from distutils.errors import DistutilsArgError
class install(_install):
"""Use easy_install to install the package, w/dependencies"""
user_options = _install.user_options + [
('old-and-unmanageable', None, "Try not to use this!"),
('single-version-externally-managed', None,
"used by system package builders to create 'flat' eggs"),
]
boolean_options = _install.boolean_options + [
'old-and-unmanageable', 'single-version-externally-managed',
]
new_commands = [
('install_egg_info', lambda self: True),
('install_scripts', lambda self: True),
]
_nc = dict(new_commands)
sub_commands = [
cmd for cmd in _install.sub_commands if cmd[0] not in _nc
] + new_commands
def initialize_options(self):
_install.initialize_options(self)
self.old_and_unmanageable = None
self.single_version_externally_managed = None
self.no_compile = None # make DISTUTILS_DEBUG work right!
def finalize_options(self):
_install.finalize_options(self)
if self.root:
self.single_version_externally_managed = True
elif self.single_version_externally_managed:
if not self.root and not self.record:
raise DistutilsArgError(
"You must specify --record or --root when building system"
" packages"
)
def handle_extra_path(self):
# We always ignore extra_path, because we install as .egg or .egg-info
self.path_file = None
self.extra_dirs = ''
def run(self):
# Explicit request for old-style install? Just do it
if self.old_and_unmanageable or self.single_version_externally_managed:
return _install.run(self)
# Attempt to detect whether we were called from setup() or by another
# command. If we were called by setup(), our caller will be the
# 'run_command' method in 'distutils.dist', and *its* caller will be
# the 'run_commands' method. If we were called any other way, our
# immediate caller *might* be 'run_command', but it won't have been
# called by 'run_commands'. This is slightly kludgy, but seems to
# work.
#
caller = sys._getframe(2)
caller_module = caller.f_globals.get('__name__','')
caller_name = caller.f_code.co_name
if caller_module != 'distutils.dist' or caller_name!='run_commands':
# We weren't called from the command line or setup(), so we
# should run in backward-compatibility mode to support bdist_*
# commands.
_install.run(self)
else:
self.do_egg_install()
def do_egg_install(self):
from setuptools.command.easy_install import easy_install
cmd = easy_install(
self.distribution, args="x", root=self.root, record=self.record,
)
cmd.ensure_finalized() # finalize before bdist_egg munges install cmd
self.run_command('bdist_egg')
args = [self.distribution.get_command_obj('bdist_egg').egg_output]
if setuptools.bootstrap_install_from:
# Bootstrap self-installation of setuptools
args.insert(0, setuptools.bootstrap_install_from)
cmd.args = args
cmd.run()
setuptools.bootstrap_install_from = None
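The frame inspection in run() is the subtle part: the egg-based install only kicks in when the command was reached through distutils' run_commands() -> run_command() chain, i.e. a plain setup() invocation. A small, self-contained demonstration of the sys._getframe() trick it relies on; the function names below are illustrative, not setuptools code:
# Sketch: 'outer' stands in for distutils.dist.run_commands and 'middle'
# for run_command; probe() looks two frames up, just as install.run() does
# (the real check also inspects f_globals['__name__']).
import sys

def probe():
    caller = sys._getframe(2)        # this frame's caller's caller
    return caller.f_code.co_name

def middle():
    return probe()

def outer():
    return middle()

assert outer() == 'outer'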


@ -0,0 +1,82 @@
from setuptools import Command
from setuptools.archive_util import unpack_archive
from distutils import log, dir_util
import os, shutil, pkg_resources
class install_egg_info(Command):
"""Install an .egg-info directory for the package"""
description = "Install an .egg-info directory for the package"
user_options = [
('install-dir=', 'd', "directory to install to"),
]
def initialize_options(self):
self.install_dir = None
def finalize_options(self):
self.set_undefined_options('install_lib',('install_dir','install_dir'))
ei_cmd = self.get_finalized_command("egg_info")
basename = pkg_resources.Distribution(
None, None, ei_cmd.egg_name, ei_cmd.egg_version
).egg_name()+'.egg-info'
self.source = ei_cmd.egg_info
self.target = os.path.join(self.install_dir, basename)
self.outputs = [self.target]
def run(self):
self.run_command('egg_info')
target = self.target
if os.path.isdir(self.target) and not os.path.islink(self.target):
dir_util.remove_tree(self.target, dry_run=self.dry_run)
elif os.path.exists(self.target):
self.execute(os.unlink,(self.target,),"Removing "+self.target)
if not self.dry_run:
pkg_resources.ensure_directory(self.target)
self.execute(self.copytree, (),
"Copying %s to %s" % (self.source, self.target)
)
self.install_namespaces()
def get_outputs(self):
return self.outputs
def copytree(self):
# Copy the .egg-info tree to site-packages
def skimmer(src,dst):
# filter out source-control directories; note that 'src' is always
# a '/'-separated path, regardless of platform. 'dst' is a
# platform-specific path.
for skip in '.svn/','CVS/':
if src.startswith(skip) or '/'+skip in src:
return None
self.outputs.append(dst)
log.debug("Copying %s to %s", src, dst)
return dst
unpack_archive(self.source, self.target, skimmer)
def install_namespaces(self):
nsp = (self.distribution.namespace_packages or [])[:]
if not nsp: return
nsp.sort() # set up shorter names first
filename,ext = os.path.splitext(self.target)
filename += '-nspkg.pth'; self.outputs.append(filename)
log.info("Installing %s",filename)
if not self.dry_run:
f = open(filename,'wb')
for pkg in nsp:
pth = tuple(pkg.split('.'))
f.write(
"import sys,new,os; "
"p = os.path.join(sys._getframe(1).f_locals['sitedir'], "
"*%(pth)r); "
"ie = os.path.exists(os.path.join(p,'__init__.py')); "
"m = not ie and "
"sys.modules.setdefault(%(pkg)r,new.module(%(pkg)r)); "
"mp = (m or []) and m.__dict__.setdefault('__path__',[]); "
"(p not in mp) and mp.append(p)\n"
% locals()
)
f.close()
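copytree() leans on the filter-callback contract of unpack_archive(): the callback receives a '/'-separated source path and a platform destination path, and returns the destination to copy or None to skip. A minimal sketch of another filter written against that contract; the paths in the commented-out call are placeholders:
# Sketch: same callback contract as skimmer() above, different filter.
from setuptools.archive_util import unpack_archive

def no_backups(src, dst):
    # 'src' is always '/'-separated here, as noted in copytree()
    if src.endswith('~') or '/.svn/' in '/' + src + '/':
        return None        # skip editor backups and .svn internals
    return dst             # a true return value means "copy to dst"

# unpack_archive('build/Example.egg-info', 'site-packages/Example.egg-info',
#                no_backups)   # a directory source is accepted, as copytree() shows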


@ -0,0 +1,82 @@
from distutils.command.install_lib import install_lib as _install_lib
import os
class install_lib(_install_lib):
"""Don't add compiled flags to filenames of non-Python files"""
def _bytecode_filenames (self, py_filenames):
bytecode_files = []
for py_file in py_filenames:
if not py_file.endswith('.py'):
continue
if self.compile:
bytecode_files.append(py_file + "c")
if self.optimize > 0:
bytecode_files.append(py_file + "o")
return bytecode_files
def run(self):
self.build()
outfiles = self.install()
if outfiles is not None:
# always compile, in case we have any extension stubs to deal with
self.byte_compile(outfiles)
def get_exclusions(self):
exclude = {}
nsp = self.distribution.namespace_packages
if (nsp and self.get_finalized_command('install')
.single_version_externally_managed
):
for pkg in nsp:
parts = pkg.split('.')
while parts:
pkgdir = os.path.join(self.install_dir, *parts)
for f in '__init__.py', '__init__.pyc', '__init__.pyo':
exclude[os.path.join(pkgdir,f)] = 1
parts.pop()
return exclude
def copy_tree(
self, infile, outfile,
preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
):
assert preserve_mode and preserve_times and not preserve_symlinks
exclude = self.get_exclusions()
if not exclude:
return _install_lib.copy_tree(self, infile, outfile)
# Exclude namespace package __init__.py* files from the output
from setuptools.archive_util import unpack_directory
from distutils import log
outfiles = []
def pf(src, dst):
if dst in exclude:
log.warn("Skipping installation of %s (namespace package)",dst)
return False
log.info("copying %s -> %s", src, os.path.dirname(dst))
outfiles.append(dst)
return dst
unpack_directory(infile, outfile, pf)
return outfiles
def get_outputs(self):
outputs = _install_lib.get_outputs(self)
exclude = self.get_exclusions()
if exclude:
return [f for f in outputs if f not in exclude]
return outputs
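To make get_exclusions() concrete: for a hypothetical namespace package 'foo.bar' installed in single-version-externally-managed mode, the skipped files are the __init__ modules at every level of the namespace. A short sketch, with a made-up install_dir:
# Sketch: the paths get_exclusions() would exclude for nsp = ['foo.bar'].
import os
install_dir = '/usr/lib/python2.4/site-packages'
for pkg_dir in ('foo/bar', 'foo'):
    for f in ('__init__.py', '__init__.pyc', '__init__.pyo'):
        print os.path.join(install_dir, pkg_dir, f)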


@ -0,0 +1,82 @@
from distutils.command.install_scripts import install_scripts \
as _install_scripts
from easy_install import get_script_args, sys_executable
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
class install_scripts(_install_scripts):
"""Do normal script install, plus any egg_info wrapper scripts"""
def initialize_options(self):
_install_scripts.initialize_options(self)
self.no_ep = False
def run(self):
self.run_command("egg_info")
if self.distribution.scripts:
_install_scripts.run(self) # run first to set up self.outfiles
else:
self.outfiles = []
if self.no_ep:
# don't install entry point scripts into .egg file!
return
ei_cmd = self.get_finalized_command("egg_info")
dist = Distribution(
ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
ei_cmd.egg_name, ei_cmd.egg_version,
)
bs_cmd = self.get_finalized_command('build_scripts')
executable = getattr(bs_cmd,'executable',sys_executable)
for args in get_script_args(dist, executable):
self.write_script(*args)
def write_script(self, script_name, contents, mode="t", *ignored):
"""Write an executable file to the scripts directory"""
log.info("Installing %s script to %s", script_name, self.install_dir)
target = os.path.join(self.install_dir, script_name)
self.outfiles.append(target)
if not self.dry_run:
ensure_directory(target)
f = open(target,"w"+mode)
f.write(contents)
f.close()
try:
os.chmod(target,0755)
except (AttributeError, os.error):
pass
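write_script() only needs a script name and the complete text of the wrapper; get_script_args() supplies those pairs for each entry point. The pair below is a hand-written stand-in for a hypothetical 'hello = mypkg.cli:main' console script, and the real generated wrapper text differs:
# Sketch: a made-up (name, contents) pair of the kind write_script() consumes.
script_name = 'hello'
contents = (
    "#!/usr/bin/python\n"
    "from mypkg.cli import main\n"
    "main()\n"
)
# cmd.write_script(script_name, contents)   # 'cmd' would be a finalized install_scripts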


@ -0,0 +1,82 @@
import distutils, os
from setuptools import Command
from distutils.util import convert_path
from distutils import log
from distutils.errors import *
class rotate(Command):
"""Delete older distributions"""
description = "delete older distributions, keeping N newest files"
user_options = [
('match=', 'm', "patterns to match (required)"),
('dist-dir=', 'd', "directory where the distributions are"),
('keep=', 'k', "number of matching distributions to keep"),
]
boolean_options = []
def initialize_options(self):
self.match = None
self.dist_dir = None
self.keep = None
def finalize_options(self):
if self.match is None:
raise DistutilsOptionError(
"Must specify one or more (comma-separated) match patterns "
"(e.g. '.zip' or '.egg')"
)
if self.keep is None:
raise DistutilsOptionError("Must specify number of files to keep")
try:
self.keep = int(self.keep)
except ValueError:
raise DistutilsOptionError("--keep must be an integer")
if isinstance(self.match, basestring):
self.match = [
convert_path(p.strip()) for p in self.match.split(',')
]
self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
def run(self):
self.run_command("egg_info")
from glob import glob
for pattern in self.match:
pattern = self.distribution.get_name()+'*'+pattern
files = glob(os.path.join(self.dist_dir,pattern))
files = [(os.path.getmtime(f),f) for f in files]
files.sort()
files.reverse()
log.info("%d file(s) matching %s", len(files), pattern)
files = files[self.keep:]
for (t,f) in files:
log.info("Deleting %s", f)
if not self.dry_run:
os.unlink(f)
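The heart of rotate is the sort-by-mtime-and-trim step. A standalone sketch of that selection, with made-up directory and pattern values:
# Sketch: which files rotate.run() would delete for --keep=2.
import os
from glob import glob

def deletion_candidates(dist_dir, pattern, keep):
    files = [(os.path.getmtime(f), f) for f in glob(os.path.join(dist_dir, pattern))]
    files.sort()
    files.reverse()                        # newest first
    return [f for (t, f) in files[keep:]]  # everything beyond the newest 'keep'

# deletion_candidates('dist', 'MyProject*.egg', 2)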


@ -0,0 +1,25 @@
import distutils, os
from setuptools import Command
from setuptools.command.setopt import edit_config, option_base
class saveopts(option_base):
"""Save command-line options to a file"""
description = "save supplied options to setup.cfg or other config file"
def run(self):
dist = self.distribution
commands = dist.command_options.keys()
settings = {}
for cmd in commands:
if cmd=='saveopts':
continue # don't save our own options!
for opt,(src,val) in dist.get_option_dict(cmd).items():
if src=="command line":
settings.setdefault(cmd,{})[opt] = val
edit_config(self.filename, settings, self.dry_run)
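What saveopts writes is driven entirely by the (source, value) pairs distutils records for each option; only pairs whose source is "command line" survive. A tiny sketch of that filtering over a hand-built option table, with all names made up:
# Sketch: the same filtering loop as saveopts.run(), on fake data.
command_options = {
    'build': {'build_base': ('command line', 'tmp'),
              'compiler':   ('setup.cfg',    'mingw32')},
}
settings = {}
for cmd, opts in command_options.items():
    for opt, (src, val) in opts.items():
        if src == 'command line':
            settings.setdefault(cmd, {})[opt] = val
# settings == {'build': {'build_base': 'tmp'}}; only command-line options
# are passed on to edit_config().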

164
Lib/setuptools/command/sdist.py Executable file

@ -0,0 +1,164 @@
from distutils.command.sdist import sdist as _sdist
from distutils.util import convert_path
import os, re, sys, pkg_resources
entities = [
("&lt;","<"), ("&gt;", ">"), ("&quot;", '"'), ("&apos;", "'"),
("&amp;", "&")
]
def unescape(data):
for old,new in entities:
data = data.replace(old,new)
return data
def re_finder(pattern, postproc=None):
def find(dirname, filename):
f = open(filename,'rU')
data = f.read()
f.close()
for match in pattern.finditer(data):
path = match.group(1)
if postproc:
path = postproc(path)
yield joinpath(dirname,path)
return find
def joinpath(prefix,suffix):
if not prefix:
return suffix
return os.path.join(prefix,suffix)
def walk_revctrl(dirname=''):
"""Find all files under revision control"""
for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
for item in ep.load()(dirname):
yield item
def _default_revctrl(dirname=''):
for path, finder in finders:
path = joinpath(dirname,path)
if os.path.isfile(path):
for path in finder(dirname,path):
if os.path.isfile(path):
yield path
elif os.path.isdir(path):
for item in _default_revctrl(path):
yield item
def externals_finder(dirname, filename):
"""Find any 'svn:externals' directories"""
found = False
f = open(filename,'rb')
for line in iter(f.readline, ''): # can't use direct iter!
parts = line.split()
if len(parts)==2:
kind,length = parts
data = f.read(int(length))
if kind=='K' and data=='svn:externals':
found = True
elif kind=='V' and found:
f.close()
break
else:
f.close()
return
for line in data.splitlines():
parts = line.split()
if parts:
yield joinpath(dirname, parts[0])
finders = [
(convert_path('CVS/Entries'),
re_finder(re.compile(r"^\w?/([^/]+)/", re.M))),
(convert_path('.svn/entries'),
re_finder(
re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I),
unescape
)
),
(convert_path('.svn/dir-props'), externals_finder),
]
class sdist(_sdist):
"""Smart sdist that finds anything supported by revision control"""
user_options = [
('formats=', None,
"formats for source distribution (comma-separated list)"),
('keep-temp', 'k',
"keep the distribution tree around after creating " +
"archive file(s)"),
('dist-dir=', 'd',
"directory to put the source distribution archive(s) in "
"[default: dist]"),
]
negative_opt = {}
def run(self):
self.run_command('egg_info')
ei_cmd = self.get_finalized_command('egg_info')
self.filelist = ei_cmd.filelist
self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt'))
self.check_metadata()
self.make_distribution()
dist_files = getattr(self.distribution,'dist_files',[])
for file in self.archive_files:
data = ('sdist', '', file)
if data not in dist_files:
dist_files.append(data)
def read_template(self):
try:
_sdist.read_template(self)
except:
# grody hack to close the template file (MANIFEST.in)
# this prevents easy_install's attempt at deleting the file from
# dying and thus masking the real error
sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close()
raise
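re_finder() turns a regular expression into a file finder: it scans a control file and yields one path per match of the pattern's first group. A small sketch exercising it with the CVS/Entries pattern from the 'finders' table above, assuming re_finder() and joinpath() are in scope; the Entries content and directory name are made up, and the expected output assumes a POSIX path separator:
# Sketch: run the CVS finder on a fake Entries file.
import re

open('Entries.sample', 'w').write("/setup.py/1.2/Mon Apr 17 2006//\nD/docs////\n")
find = re_finder(re.compile(r"^\w?/([^/]+)/", re.M))
print list(find('myproject', 'Entries.sample'))
# -> ['myproject/setup.py', 'myproject/docs']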

164
Lib/setuptools/command/setopt.py Executable file

@ -0,0 +1,164 @@
import distutils, os
from setuptools import Command
from distutils.util import convert_path
from distutils import log
from distutils.errors import *
__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
def config_file(kind="local"):
"""Get the filename of the distutils, local, global, or per-user config
`kind` must be one of "local", "global", or "user"
"""
if kind=='local':
return 'setup.cfg'
if kind=='global':
return os.path.join(
os.path.dirname(distutils.__file__),'distutils.cfg'
)
if kind=='user':
dot = os.name=='posix' and '.' or ''
return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
raise ValueError(
"config_file() type must be 'local', 'global', or 'user'", kind
)
def edit_config(filename, settings, dry_run=False):
"""Edit a configuration file to include `settings`
`settings` is a dictionary of dictionaries or ``None`` values, keyed by
command/section name. A ``None`` value means to delete the entire section,
while a dictionary lists settings to be changed or deleted in that section.
A setting of ``None`` means to delete that setting.
"""
from ConfigParser import RawConfigParser
log.debug("Reading configuration from %s", filename)
opts = RawConfigParser()
opts.read([filename])
for section, options in settings.items():
if options is None:
log.info("Deleting section [%s] from %s", section, filename)
opts.remove_section(section)
else:
if not opts.has_section(section):
log.debug("Adding new section [%s] to %s", section, filename)
opts.add_section(section)
for option,value in options.items():
if value is None:
log.debug("Deleting %s.%s from %s",
section, option, filename
)
opts.remove_option(section,option)
if not opts.options(section):
log.info("Deleting empty [%s] section from %s",
section, filename)
opts.remove_section(section)
else:
log.debug(
"Setting %s.%s to %r in %s",
section, option, value, filename
)
opts.set(section,option,value)
log.info("Writing %s", filename)
if not dry_run:
f = open(filename,'w'); opts.write(f); f.close()
class option_base(Command):
"""Abstract base class for commands that mess with config files"""
user_options = [
('global-config', 'g',
"save options to the site-wide distutils.cfg file"),
('user-config', 'u',
"save options to the current user's pydistutils.cfg file"),
('filename=', 'f',
"configuration file to use (default=setup.cfg)"),
]
boolean_options = [
'global-config', 'user-config',
]
def initialize_options(self):
self.global_config = None
self.user_config = None
self.filename = None
def finalize_options(self):
filenames = []
if self.global_config:
filenames.append(config_file('global'))
if self.user_config:
filenames.append(config_file('user'))
if self.filename is not None:
filenames.append(self.filename)
if not filenames:
filenames.append(config_file('local'))
if len(filenames)>1:
raise DistutilsOptionError(
"Must specify only one configuration file option",
filenames
)
self.filename, = filenames
class setopt(option_base):
"""Save command-line options to a file"""
description = "set an option in setup.cfg or another config file"
user_options = [
('command=', 'c', 'command to set an option for'),
('option=', 'o', 'option to set'),
('set-value=', 's', 'value of the option'),
('remove', 'r', 'remove (unset) the value'),
] + option_base.user_options
boolean_options = option_base.boolean_options + ['remove']
def initialize_options(self):
option_base.initialize_options(self)
self.command = None
self.option = None
self.set_value = None
self.remove = None
def finalize_options(self):
option_base.finalize_options(self)
if self.command is None or self.option is None:
raise DistutilsOptionError("Must specify --command *and* --option")
if self.set_value is None and not self.remove:
raise DistutilsOptionError("Must specify --set-value or --remove")
def run(self):
edit_config(
self.filename, {
self.command: {self.option.replace('-','_'):self.set_value}
},
self.dry_run
)
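Driven from Python rather than from the command line, the same edits boil down to two edit_config() calls (assuming edit_config from the file above is in scope); the section, option, and URL are placeholders:
# Sketch: equivalent of "setup.py setopt -c easy_install -o index-url -s URL"
# followed by a later "setup.py setopt -c easy_install -o index-url -r".
edit_config('setup.cfg',
            {'easy_install': {'index_url': 'http://example.com/simple'}})
edit_config('setup.cfg',
            {'easy_install': {'index_url': None}})    # unset it again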


@ -0,0 +1,123 @@
from setuptools import Command
from distutils.errors import DistutilsOptionError
import sys
from pkg_resources import *
from unittest import TestLoader, main
class ScanningLoader(TestLoader):
def loadTestsFromModule(self, module):
"""Return a suite of all tests cases contained in the given module
If the module is a package, load tests from all the modules in it.
If the module has an ``additional_tests`` function, call it and add
the return value to the tests.
"""
tests = []
if module.__name__!='setuptools.tests.doctest': # ugh
tests.append(TestLoader.loadTestsFromModule(self,module))
if hasattr(module, "additional_tests"):
tests.append(module.additional_tests())
if hasattr(module, '__path__'):
for file in resource_listdir(module.__name__, ''):
if file.endswith('.py') and file!='__init__.py':
submodule = module.__name__+'.'+file[:-3]
else:
if resource_exists(
module.__name__, file+'/__init__.py'
):
submodule = module.__name__+'.'+file
else:
continue
tests.append(self.loadTestsFromName(submodule))
if len(tests)!=1:
return self.suiteClass(tests)
else:
return tests[0] # don't create a nested suite for only one return
class test(Command):
"""Command to run unit tests after in-place build"""
description = "run unit tests after in-place build"
user_options = [
('test-module=','m', "Run 'test_suite' in specified module"),
('test-suite=','s',
"Test suite to run (e.g. 'some_module.test_suite')"),
]
def initialize_options(self):
self.test_suite = None
self.test_module = None
self.test_loader = None
def finalize_options(self):
if self.test_suite is None:
if self.test_module is None:
self.test_suite = self.distribution.test_suite
else:
self.test_suite = self.test_module+".test_suite"
elif self.test_module:
raise DistutilsOptionError(
"You may specify a module or a suite, but not both"
)
self.test_args = [self.test_suite]
if self.verbose:
self.test_args.insert(0,'--verbose')
if self.test_loader is None:
self.test_loader = getattr(self.distribution,'test_loader',None)
if self.test_loader is None:
self.test_loader = "setuptools.command.test:ScanningLoader"
def run(self):
# Ensure metadata is up-to-date
self.run_command('egg_info')
# Build extensions in-place
self.reinitialize_command('build_ext', inplace=1)
self.run_command('build_ext')
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
if self.test_suite:
cmd = ' '.join(self.test_args)
if self.dry_run:
self.announce('skipping "unittest %s" (dry run)' % cmd)
else:
self.announce('running "unittest %s"' % cmd)
self.run_tests()
def run_tests(self):
import unittest
old_path = sys.path[:]
ei_cmd = self.get_finalized_command("egg_info")
path_item = normalize_path(ei_cmd.egg_base)
metadata = PathMetadata(
path_item, normalize_path(ei_cmd.egg_info)
)
dist = Distribution(path_item, metadata, project_name=ei_cmd.egg_name)
working_set.add(dist)
require(str(dist.as_requirement()))
loader_ep = EntryPoint.parse("x="+self.test_loader)
loader_class = loader_ep.load(require=False)
unittest.main(
None, None, [unittest.__file__]+self.test_args,
testLoader = loader_class()
)
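From a project's point of view, the test command is configured through two setup() keywords. A sketch of a setup.py using them, with placeholder project and requirement names:
# Sketch: not a real project; 'myproject' and 'SomeTestTool' are placeholders.
from setuptools import setup

setup(
    name='MyProject',
    version='0.1',
    packages=['myproject', 'myproject.tests'],
    test_suite='myproject.tests',       # scanned by ScanningLoader
    tests_require=['SomeTestTool'],     # fetched as eggs before the run
)
Running 'setup.py test' would then build any extensions in place, fetch SomeTestTool if it is missing, and hand myproject.tests to unittest via ScanningLoader.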

178
Lib/setuptools/command/upload.py Executable file

@ -0,0 +1,178 @@
"""distutils.command.upload
Implements the Distutils 'upload' subcommand (upload package to PyPI)."""
from distutils.errors import *
from distutils.core import Command
from distutils.spawn import spawn
from distutils import log
from md5 import md5
import os
import socket
import platform
import ConfigParser
import httplib
import base64
import urlparse
import cStringIO as StringIO
class upload(Command):
description = "upload binary package to PyPI"
DEFAULT_REPOSITORY = 'http://www.python.org/pypi'
user_options = [
('repository=', 'r',
"url of repository [default: %s]" % DEFAULT_REPOSITORY),
('show-response', None,
'display full response text from server'),
('sign', 's',
'sign files to upload using gpg'),
('identity=', 'i', 'GPG identity used to sign files'),
]
boolean_options = ['show-response', 'sign']
def initialize_options(self):
self.username = ''
self.password = ''
self.repository = ''
self.show_response = 0
self.sign = False
self.identity = None
def finalize_options(self):
if self.identity and not self.sign:
raise DistutilsOptionError(
"Must use --sign for --identity to have meaning"
)
if os.environ.has_key('HOME'):
rc = os.path.join(os.environ['HOME'], '.pypirc')
if os.path.exists(rc):
self.announce('Using PyPI login from %s' % rc)
config = ConfigParser.ConfigParser({
'username':'',
'password':'',
'repository':''})
config.read(rc)
if not self.repository:
self.repository = config.get('server-login', 'repository')
if not self.username:
self.username = config.get('server-login', 'username')
if not self.password:
self.password = config.get('server-login', 'password')
if not self.repository:
self.repository = self.DEFAULT_REPOSITORY
def run(self):
if not self.distribution.dist_files:
raise DistutilsOptionError("No dist file created in earlier command")
for command, pyversion, filename in self.distribution.dist_files:
self.upload_file(command, pyversion, filename)
def upload_file(self, command, pyversion, filename):
# Sign if requested
if self.sign:
gpg_args = ["gpg", "--detach-sign", "-a", filename]
if self.identity:
gpg_args[2:2] = ["--local-user", self.identity]
spawn(gpg_args,
dry_run=self.dry_run)
# Fill in the data
content = open(filename,'rb').read()
basename = os.path.basename(filename)
comment = ''
if command=='bdist_egg' and self.distribution.has_ext_modules():
comment = "built on %s" % platform.platform(terse=1)
data = {
':action':'file_upload',
'protcol_version':'1',
'name':self.distribution.get_name(),
'version':self.distribution.get_version(),
'content':(basename,content),
'filetype':command,
'pyversion':pyversion,
'md5_digest':md5(content).hexdigest(),
}
if command == 'bdist_rpm':
dist, version, id = platform.dist()
if dist:
comment = 'built for %s %s' % (dist, version)
elif command == 'bdist_dumb':
comment = 'built for %s' % platform.platform(terse=1)
data['comment'] = comment
if self.sign:
data['gpg_signature'] = (os.path.basename(filename) + ".asc",
open(filename+".asc").read())
# set up the authentication
auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()
# Build up the MIME payload for the POST data
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = '\n--' + boundary
end_boundary = sep_boundary + '--'
body = StringIO.StringIO()
for key, value in data.items():
# handle multiple entries for the same name
if type(value) != type([]):
value = [value]
for value in value:
if type(value) is tuple:
fn = ';filename="%s"' % value[0]
value = value[1]
else:
fn = ""
value = str(value)
body.write(sep_boundary)
body.write('\nContent-Disposition: form-data; name="%s"'%key)
body.write(fn)
body.write("\n\n")
body.write(value)
if value and value[-1] == '\r':
body.write('\n') # write an extra newline (lurve Macs)
body.write(end_boundary)
body.write("\n")
body = body.getvalue()
self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)
# build the Request
# We can't use urllib2 since we need to send the Basic
# auth right with the first request
schema, netloc, url, params, query, fragments = \
urlparse.urlparse(self.repository)
assert not params and not query and not fragments
if schema == 'http':
http = httplib.HTTPConnection(netloc)
elif schema == 'https':
http = httplib.HTTPSConnection(netloc)
else:
raise AssertionError, "unsupported schema "+schema
data = ''
loglevel = log.INFO
try:
http.connect()
http.putrequest("POST", url)
http.putheader('Content-type',
'multipart/form-data; boundary=%s'%boundary)
http.putheader('Content-length', str(len(body)))
http.putheader('Authorization', auth)
http.endheaders()
http.send(body)
except socket.error, e:
self.announce(str(e), log.ERROR)
return
r = http.getresponse()
if r.status == 200:
self.announce('Server response (%s): %s' % (r.status, r.reason),
log.INFO)
else:
self.announce('Upload failed (%s): %s' % (r.status, r.reason),
log.ERROR)
if self.show_response:
print '-'*75, r.read(), '-'*75
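The wire format is ordinary multipart/form-data over a hand-rolled HTTP POST. A sketch of the two pieces that are easiest to get wrong, the Basic-auth header and one text field, built the same way upload_file() builds them; the credentials and field value are made up:
# Sketch only -- reproduces the header/body construction, not the upload.
import base64

username, password = 'pypi-user', 'secret'
auth = "Basic " + base64.encodestring(username + ':' + password).strip()

boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
body = '\n--' + boundary
body += '\nContent-Disposition: form-data; name="name"'
body += '\n\nMyProject'
body += '\n--' + boundary + '--\n'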

246
Lib/setuptools/depends.py Normal file

@ -0,0 +1,246 @@
from __future__ import generators
import sys, imp, marshal
from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
from distutils.version import StrictVersion, LooseVersion
__all__ = [
'Require', 'find_module', 'get_module_constant', 'extract_constant'
]
class Require:
"""A prerequisite to building or installing a distribution"""
def __init__(self,name,requested_version,module,homepage='',
attribute=None,format=None
):
if format is None and requested_version is not None:
format = StrictVersion
if format is not None:
requested_version = format(requested_version)
if attribute is None:
attribute = '__version__'
self.__dict__.update(locals())
del self.self
def full_name(self):
"""Return full package/distribution name, w/version"""
if self.requested_version is not None:
return '%s-%s' % (self.name,self.requested_version)
return self.name
def version_ok(self,version):
"""Is 'version' sufficiently up-to-date?"""
return self.attribute is None or self.format is None or \
str(version)<>"unknown" and version >= self.requested_version
def get_version(self, paths=None, default="unknown"):
"""Get version number of installed module, 'None', or 'default'
Search 'paths' for module. If not found, return 'None'. If found,
return the extracted version attribute, or 'default' if no version
attribute was specified, or the value cannot be determined without
importing the module. The version is formatted according to the
requirement's version format (if any), unless it is 'None' or the
supplied 'default'.
"""
if self.attribute is None:
try:
f,p,i = find_module(self.module,paths)
if f: f.close()
return default
except ImportError:
return None
v = get_module_constant(self.module,self.attribute,default,paths)
if v is not None and v is not default and self.format is not None:
return self.format(v)
return v
def is_present(self,paths=None):
"""Return true if dependency is present on 'paths'"""
return self.get_version(paths) is not None
def is_current(self,paths=None):
"""Return true if dependency is present and up-to-date on 'paths'"""
version = self.get_version(paths)
if version is None:
return False
return self.version_ok(version)
def _iter_code(code):
"""Yield '(op,arg)' pair for each operation in code object 'code'"""
from array import array
from dis import HAVE_ARGUMENT, EXTENDED_ARG
bytes = array('b',code.co_code)
eof = len(code.co_code)
ptr = 0
extended_arg = 0
while ptr<eof:
op = bytes[ptr]
if op>=HAVE_ARGUMENT:
arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
ptr += 3
if op==EXTENDED_ARG:
extended_arg = arg * 65536L
continue
else:
arg = None
ptr += 1
yield op,arg
def find_module(module, paths=None):
"""Just like 'imp.find_module()', but with package support"""
parts = module.split('.')
while parts:
part = parts.pop(0)
f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)
if kind==PKG_DIRECTORY:
parts = parts or ['__init__']
paths = [path]
elif parts:
raise ImportError("Can't find %r in %s" % (parts,module))
return info
def get_module_constant(module, symbol, default=-1, paths=None):
"""Find 'module' by searching 'paths', and extract 'symbol'
Return 'None' if 'module' does not exist on 'paths', or it does not define
'symbol'. If the module defines 'symbol' as a constant, return the
constant. Otherwise, return 'default'."""
try:
f, path, (suffix,mode,kind) = find_module(module,paths)
except ImportError:
# Module doesn't exist
return None
try:
if kind==PY_COMPILED:
f.read(8) # skip magic & date
code = marshal.load(f)
elif kind==PY_FROZEN:
code = imp.get_frozen_object(module)
elif kind==PY_SOURCE:
code = compile(f.read(), path, 'exec')
else:
# Not something we can parse; we'll have to import it. :(
if module not in sys.modules:
imp.load_module(module,f,path,(suffix,mode,kind))
return getattr(sys.modules[module],symbol,None)
finally:
if f:
f.close()
return extract_constant(code,symbol,default)
def extract_constant(code,symbol,default=-1):
"""Extract the constant value of 'symbol' from 'code'
If the name 'symbol' is bound to a constant value by the Python code
object 'code', return that value. If 'symbol' is bound to an expression,
return 'default'. Otherwise, return 'None'.
Return value is based on the first assignment to 'symbol'. 'symbol' must
be a global, or at least a non-"fast" local in the code block. That is,
only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
must be present in 'code.co_names'.
"""
if symbol not in code.co_names:
# name's not there, can't possibly be an assignment
return None
name_idx = list(code.co_names).index(symbol)
STORE_NAME = 90
STORE_GLOBAL = 97
LOAD_CONST = 100
const = default
for op, arg in _iter_code(code):
if op==LOAD_CONST:
const = code.co_consts[arg]
elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
return const
else:
const = default
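extract_constant() and Require are easiest to see with a concrete value. A sketch, assuming the functions above are in scope; the requirement name and version numbers are illustrative:
# Sketch: constants are recoverable without importing the module...
code = compile("__version__ = '1.0'\n", '<example>', 'exec')
assert extract_constant(code, '__version__') == '1.0'

# ...and Require builds on that to version-check a dependency.
req = Require('Distutils', '1.0.3', 'distutils')
print req.full_name()        # 'Distutils-1.0.3'
print req.is_present()       # True wherever distutils is importable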

820
Lib/setuptools/dist.py Normal file

@ -0,0 +1,820 @@
__all__ = ['Distribution']
from distutils.core import Distribution as _Distribution
from setuptools.depends import Require
from setuptools.command.install import install
from setuptools.command.sdist import sdist
from setuptools.command.install_lib import install_lib
from distutils.errors import DistutilsOptionError, DistutilsPlatformError
from distutils.errors import DistutilsSetupError
import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd
import os
def _get_unpatched(cls):
"""Protect against re-patching the distutils if reloaded
Also ensures that no other distutils extension monkeypatched the distutils
first.
"""
while cls.__module__.startswith('setuptools'):
cls, = cls.__bases__
if not cls.__module__.startswith('distutils'):
raise AssertionError(
"distutils has already been patched by %r" % cls
)
return cls
_Distribution = _get_unpatched(_Distribution)
sequence = tuple, list
def check_importable(dist, attr, value):
try:
ep = pkg_resources.EntryPoint.parse('x='+value)
assert not ep.extras
except (TypeError,ValueError,AttributeError,AssertionError):
raise DistutilsSetupError(
"%r must be importable 'module:attrs' string (got %r)"
% (attr,value)
)
def assert_string_list(dist, attr, value):
"""Verify that value is a string list or None"""
try:
assert ''.join(value)!=value
except (TypeError,ValueError,AttributeError,AssertionError):
raise DistutilsSetupError(
"%r must be a list of strings (got %r)" % (attr,value)
)
def check_nsp(dist, attr, value):
"""Verify that namespace packages are valid"""
assert_string_list(dist,attr,value)
for nsp in value:
if not dist.has_contents_for(nsp):
raise DistutilsSetupError(
"Distribution contains no modules or packages for " +
"namespace package %r" % nsp
)
def check_extras(dist, attr, value):
"""Verify that extras_require mapping is valid"""
try:
for k,v in value.items():
list(pkg_resources.parse_requirements(v))
except (TypeError,ValueError,AttributeError):
raise DistutilsSetupError(
"'extras_require' must be a dictionary whose values are "
"strings or lists of strings containing valid project/version "
"requirement specifiers."
)
def assert_bool(dist, attr, value):
"""Verify that value is True, False, 0, or 1"""
if bool(value) != value:
raise DistutilsSetupError(
"%r must be a boolean value (got %r)" % (attr,value)
)
def check_requirements(dist, attr, value):
"""Verify that install_requires is a valid requirements list"""
try:
list(pkg_resources.parse_requirements(value))
except (TypeError,ValueError):
raise DistutilsSetupError(
"%r must be a string or list of strings "
"containing valid project/version requirement specifiers" % (attr,)
)
def check_entry_points(dist, attr, value):
"""Verify that entry_points map is parseable"""
try:
pkg_resources.EntryPoint.parse_map(value)
except ValueError, e:
raise DistutilsSetupError(e)
def check_test_suite(dist, attr, value):
if not isinstance(value,basestring):
raise DistutilsSetupError("test_suite must be a string")
def check_package_data(dist, attr, value):
"""Verify that value is a dictionary of package names to glob lists"""
if isinstance(value,dict):
for k,v in value.items():
if not isinstance(k,str): break
try: iter(v)
except TypeError:
break
else:
return
raise DistutilsSetupError(
attr+" must be a dictionary mapping package names to lists of "
"wildcard patterns"
)
class Distribution(_Distribution):
"""Distribution with support for features, tests, and package data
This is an enhanced version of 'distutils.dist.Distribution' that
effectively adds the following new optional keyword arguments to 'setup()':
'install_requires' -- a string or sequence of strings specifying project
versions that the distribution requires when installed, in the format
used by 'pkg_resources.require()'. They will be installed
automatically when the package is installed. If you wish to use
packages that are not available in PyPI, or want to give your users an
alternate download location, you can add a 'find_links' option to the
'[easy_install]' section of your project's 'setup.cfg' file, and then
setuptools will scan the listed web pages for links that satisfy the
requirements.
'extras_require' -- a dictionary mapping names of optional "extras" to the
additional requirement(s) that using those extras incurs. For example,
this::
extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
indicates that the distribution can optionally provide an extra
capability called "reST", but it can only be used if docutils and
reSTedit are installed. If the user installs your package using
EasyInstall and requests one of your extras, the corresponding
additional requirements will be installed if needed.
'features' -- a dictionary mapping option names to 'setuptools.Feature'
objects. Features are a portion of the distribution that can be
included or excluded based on user options, inter-feature dependencies,
and availability on the current system. Excluded features are omitted
from all setup commands, including source and binary distributions, so
you can create multiple distributions from the same source tree.
Feature names should be valid Python identifiers, except that they may
contain the '-' (minus) sign. Features can be included or excluded
via the command line options '--with-X' and '--without-X', where 'X' is
the name of the feature. Whether a feature is included by default, and
whether you are allowed to control this from the command line, is
determined by the Feature object. See the 'Feature' class for more
information.
'test_suite' -- the name of a test suite to run for the 'test' command.
If the user runs 'python setup.py test', the package will be installed,
and the named test suite will be run. The format is the same as
would be used on a 'unittest.py' command line. That is, it is the
dotted name of an object to import and call to generate a test suite.
'package_data' -- a dictionary mapping package names to lists of filenames
or globs to use to find data files contained in the named packages.
If the dictionary has filenames or globs listed under '""' (the empty
string), those names will be searched for in every package, in addition
to any names for the specific package. Data files found using these
names/globs will be installed along with the package, in the same
location as the package. Note that globs are allowed to reference
the contents of non-package subdirectories, as long as you use '/' as
a path separator. (Globs are automatically converted to
platform-specific paths at runtime.)
In addition to these new keywords, this class also has several new methods
for manipulating the distribution's contents. For example, the 'include()'
and 'exclude()' methods can be thought of as in-place add and subtract
commands that add or remove packages, modules, extensions, and so on from
the distribution. They are used by the feature subsystem to configure the
distribution for the included and excluded features.
"""
_patched_dist = None
def patch_missing_pkg_info(self, attrs):
# Fake up a replacement for the data that would normally come from
# PKG-INFO, but which might not yet be built if this is a fresh
# checkout.
#
if not attrs or 'name' not in attrs or 'version' not in attrs:
return
key = pkg_resources.safe_name(str(attrs['name'])).lower()
dist = pkg_resources.working_set.by_key.get(key)
if dist is not None and not dist.has_metadata('PKG-INFO'):
dist._version = pkg_resources.safe_version(str(attrs['version']))
self._patched_dist = dist
def __init__ (self, attrs=None):
have_package_data = hasattr(self, "package_data")
if not have_package_data:
self.package_data = {}
self.require_features = []
self.features = {}
self.dist_files = []
self.patch_missing_pkg_info(attrs)
# Make sure we have any eggs needed to interpret 'attrs'
if attrs and 'dependency_links' in attrs:
self.dependency_links = attrs.pop('dependency_links')
assert_string_list(self,'dependency_links',self.dependency_links)
if attrs and 'setup_requires' in attrs:
self.fetch_build_eggs(attrs.pop('setup_requires'))
for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
if not hasattr(self,ep.name):
setattr(self,ep.name,None)
_Distribution.__init__(self,attrs)
if isinstance(self.metadata.version, (int,long,float)):
# Some people apparently take "version number" too literally :)
self.metadata.version = str(self.metadata.version)
def parse_command_line(self):
"""Process features after parsing command line options"""
result = _Distribution.parse_command_line(self)
if self.features:
self._finalize_features()
return result
def _feature_attrname(self,name):
"""Convert feature name to corresponding option attribute name"""
return 'with_'+name.replace('-','_')
def fetch_build_eggs(self, requires):
"""Resolve pre-setup requirements"""
from pkg_resources import working_set, parse_requirements
for dist in working_set.resolve(
parse_requirements(requires), installer=self.fetch_build_egg
):
working_set.add(dist)
def finalize_options(self):
_Distribution.finalize_options(self)
if self.features:
self._set_global_opts_from_features()
for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
value = getattr(self,ep.name,None)
if value is not None:
ep.require(installer=self.fetch_build_egg)
ep.load()(self, ep.name, value)
def fetch_build_egg(self, req):
"""Fetch an egg needed for building"""
try:
cmd = self._egg_fetcher
except AttributeError:
from setuptools.command.easy_install import easy_install
dist = self.__class__({'script_args':['easy_install']})
dist.parse_config_files()
opts = dist.get_option_dict('easy_install')
keep = (
'find_links', 'site_dirs', 'index_url', 'optimize',
'allow_hosts'
)
for key in opts.keys():
if key not in keep:
del opts[key] # don't use any other settings
if self.dependency_links:
links = self.dependency_links[:]
if 'find_links' in opts:
links = opts['find_links'][1].split() + links
opts['find_links'] = ('setup', links)
cmd = easy_install(
dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
always_copy=False, build_directory=None, editable=False,
upgrade=False, multi_version=True, no_report = True
)
cmd.ensure_finalized()
self._egg_fetcher = cmd
return cmd.easy_install(req)
def _set_global_opts_from_features(self):
"""Add --with-X/--without-X options based on optional features"""
go = []
no = self.negative_opt.copy()
for name,feature in self.features.items():
self._set_feature(name,None)
feature.validate(self)
if feature.optional:
descr = feature.description
incdef = ' (default)'
excdef=''
if not feature.include_by_default():
excdef, incdef = incdef, excdef
go.append(('with-'+name, None, 'include '+descr+incdef))
go.append(('without-'+name, None, 'exclude '+descr+excdef))
no['without-'+name] = 'with-'+name
self.global_options = self.feature_options = go + self.global_options
self.negative_opt = self.feature_negopt = no
def _finalize_features(self):
"""Add/remove features and resolve dependencies between them"""
# First, flag all the enabled items (and thus their dependencies)
for name,feature in self.features.items():
enabled = self.feature_is_included(name)
if enabled or (enabled is None and feature.include_by_default()):
feature.include_in(self)
self._set_feature(name,1)
# Then disable the rest, so that off-by-default features don't
# get flagged as errors when they're required by an enabled feature
for name,feature in self.features.items():
if not self.feature_is_included(name):
feature.exclude_from(self)
self._set_feature(name,0)
def get_command_class(self, command):
"""Pluggable version of get_command_class()"""
if command in self.cmdclass:
return self.cmdclass[command]
for ep in pkg_resources.iter_entry_points('distutils.commands',command):
ep.require(installer=self.fetch_build_egg)
self.cmdclass[command] = cmdclass = ep.load()
return cmdclass
else:
return _Distribution.get_command_class(self, command)
def print_commands(self):
for ep in pkg_resources.iter_entry_points('distutils.commands'):
if ep.name not in self.cmdclass:
cmdclass = ep.load(False) # don't require extras, we're not running
self.cmdclass[ep.name] = cmdclass
return _Distribution.print_commands(self)
def _set_feature(self,name,status):
"""Set feature's inclusion status"""
setattr(self,self._feature_attrname(name),status)
def feature_is_included(self,name):
"""Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
return getattr(self,self._feature_attrname(name))
def include_feature(self,name):
"""Request inclusion of feature named 'name'"""
if self.feature_is_included(name)==0:
descr = self.features[name].description
raise DistutilsOptionError(
descr + " is required, but was excluded or is not available"
)
self.features[name].include_in(self)
self._set_feature(name,1)
def include(self,**attrs):
"""Add items to distribution that are named in keyword arguments
For example, 'dist.include(py_modules=["x"])' would add 'x' to
the distribution's 'py_modules' attribute, if it was not already
there.
Currently, this method only supports inclusion for attributes that are
lists or tuples. If you need to add support for adding to other
attributes in this or a subclass, you can add an '_include_X' method,
where 'X' is the name of the attribute. The method will be called with
the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})'
will try to call 'dist._include_foo({"bar":"baz"})', which can then
handle whatever special inclusion logic is needed.
"""
for k,v in attrs.items():
include = getattr(self, '_include_'+k, None)
if include:
include(v)
else:
self._include_misc(k,v)
def exclude_package(self,package):
"""Remove packages, modules, and extensions in named package"""
pfx = package+'.'
if self.packages:
self.packages = [
p for p in self.packages
if p<>package and not p.startswith(pfx)
]
if self.py_modules:
self.py_modules = [
p for p in self.py_modules
if p<>package and not p.startswith(pfx)
]
if self.ext_modules:
self.ext_modules = [
p for p in self.ext_modules
if p.name<>package and not p.name.startswith(pfx)
]
def has_contents_for(self,package):
"""Return true if 'exclude_package(package)' would do something"""
pfx = package+'.'
for p in self.iter_distribution_names():
if p==package or p.startswith(pfx):
return True
def _exclude_misc(self,name,value):
"""Handle 'exclude()' for list/tuple attrs without a special handler"""
if not isinstance(value,sequence):
raise DistutilsSetupError(
"%s: setting must be a list or tuple (%r)" % (name, value)
)
try:
old = getattr(self,name)
except AttributeError:
raise DistutilsSetupError(
"%s: No such distribution setting" % name
)
if old is not None and not isinstance(old,sequence):
raise DistutilsSetupError(
name+": this setting cannot be changed via include/exclude"
)
elif old:
setattr(self,name,[item for item in old if item not in value])
def _include_misc(self,name,value):
"""Handle 'include()' for list/tuple attrs without a special handler"""
if not isinstance(value,sequence):
raise DistutilsSetupError(
"%s: setting must be a list (%r)" % (name, value)
)
try:
old = getattr(self,name)
except AttributeError:
raise DistutilsSetupError(
"%s: No such distribution setting" % name
)
if old is None:
setattr(self,name,value)
elif not isinstance(old,sequence):
raise DistutilsSetupError(
name+": this setting cannot be changed via include/exclude"
)
else:
setattr(self,name,old+[item for item in value if item not in old])
def exclude(self,**attrs):
"""Remove items from distribution that are named in keyword arguments
For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
the distribution's 'py_modules' attribute. Excluding packages uses
the 'exclude_package()' method, so all of the package's contained
packages, modules, and extensions are also excluded.
Currently, this method only supports exclusion from attributes that are
lists or tuples. If you need to add support for excluding from other
attributes in this or a subclass, you can add an '_exclude_X' method,
where 'X' is the name of the attribute. The method will be called with
the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})'
will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
handle whatever special exclusion logic is needed.
"""
for k,v in attrs.items():
exclude = getattr(self, '_exclude_'+k, None)
if exclude:
exclude(v)
else:
self._exclude_misc(k,v)
def _exclude_packages(self,packages):
if not isinstance(packages,sequence):
raise DistutilsSetupError(
"packages: setting must be a list or tuple (%r)" % (packages,)
)
map(self.exclude_package, packages)
def _parse_command_opts(self, parser, args):
# Remove --with-X/--without-X options when processing command args
self.global_options = self.__class__.global_options
self.negative_opt = self.__class__.negative_opt
# First, expand any aliases
command = args[0]
aliases = self.get_option_dict('aliases')
while command in aliases:
src,alias = aliases[command]
del aliases[command] # ensure each alias can expand only once!
import shlex
args[:1] = shlex.split(alias,True)
command = args[0]
nargs = _Distribution._parse_command_opts(self, parser, args)
# Handle commands that want to consume all remaining arguments
cmd_class = self.get_command_class(command)
if getattr(cmd_class,'command_consumes_arguments',None):
self.get_option_dict(command)['args'] = ("command line", nargs)
if nargs is not None:
return []
return nargs
def get_cmdline_options(self):
"""Return a '{cmd: {opt:val}}' map of all command-line options
Option names are all long, but do not include the leading '--', and
contain dashes rather than underscores. If the option doesn't take
an argument (e.g. '--quiet'), the 'val' is 'None'.
Note that options provided by config files are intentionally excluded.
"""
d = {}
for cmd,opts in self.command_options.items():
for opt,(src,val) in opts.items():
if src != "command line":
continue
opt = opt.replace('_','-')
if val==0:
cmdobj = self.get_command_obj(cmd)
neg_opt = self.negative_opt.copy()
neg_opt.update(getattr(cmdobj,'negative_opt',{}))
for neg,pos in neg_opt.items():
if pos==opt:
opt=neg
val=None
break
else:
raise AssertionError("Shouldn't be able to get here")
elif val==1:
val = None
d.setdefault(cmd,{})[opt] = val
return d
def iter_distribution_names(self):
"""Yield all packages, modules, and extension names in distribution"""
for pkg in self.packages or ():
yield pkg
for module in self.py_modules or ():
yield module
for ext in self.ext_modules or ():
if isinstance(ext,tuple):
name,buildinfo = ext
yield name
else:
yield ext.name
# Install it throughout the distutils
for module in distutils.dist, distutils.core, distutils.cmd:
module.Distribution = Distribution
class Feature:
"""A subset of the distribution that can be excluded if unneeded/wanted
Features are created using these keyword arguments:
'description' -- a short, human readable description of the feature, to
be used in error messages, and option help messages.
'standard' -- if true, the feature is included by default if it is
available on the current system. Otherwise, the feature is only
included if requested via a command line '--with-X' option, or if
another included feature requires it. The default setting is 'False'.
'available' -- if true, the feature is available for installation on the
current system. The default setting is 'True'.
'optional' -- if true, the feature's inclusion can be controlled from the
command line, using the '--with-X' or '--without-X' options. If
false, the feature's inclusion status is determined automatically,
based on 'available', 'standard', and whether any other feature
requires it. The default setting is 'True'.
'require_features' -- a string or sequence of strings naming features
that should also be included if this feature is included. Defaults to
empty list. May also contain 'Require' objects that should be
added/removed from the distribution.
'remove' -- a string or list of strings naming packages to be removed
from the distribution if this feature is *not* included. If the
feature *is* included, this argument is ignored. This argument exists
to support removing features that "crosscut" a distribution, such as
defining a 'tests' feature that removes all the 'tests' subpackages
provided by other features. The default for this argument is an empty
list. (Note: the named package(s) or modules must exist in the base
distribution when the 'setup()' function is initially called.)
other keywords -- any other keyword arguments are saved, and passed to
the distribution's 'include()' and 'exclude()' methods when the
feature is included or excluded, respectively. So, for example, you
could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
added or removed from the distribution as appropriate.
A feature must include at least one 'require_features', 'remove', or other
keyword argument. Otherwise, it can't affect the distribution in any way.
Note also that you can subclass 'Feature' to create your own specialized
feature types that modify the distribution in other ways when included or
excluded. See the docstrings for the various methods here for more detail.
Aside from the methods, the only feature attributes that distributions look
at are 'description' and 'optional'.
"""
def __init__(self, description, standard=False, available=True,
optional=True, require_features=(), remove=(), **extras
):
self.description = description
self.standard = standard
self.available = available
self.optional = optional
if isinstance(require_features,(str,Require)):
require_features = require_features,
self.require_features = [
r for r in require_features if isinstance(r,str)
]
er = [r for r in require_features if not isinstance(r,str)]
if er: extras['require_features'] = er
if isinstance(remove,str):
remove = remove,
self.remove = remove
self.extras = extras
if not remove and not require_features and not extras:
raise DistutilsSetupError(
"Feature %s: must define 'require_features', 'remove', or at least one"
" of 'packages', 'py_modules', etc."
)
def include_by_default(self):
"""Should this feature be included by default?"""
return self.available and self.standard
def include_in(self,dist):
"""Ensure feature and its requirements are included in distribution
You may override this in a subclass to perform additional operations on
the distribution. Note that this method may be called more than once
per feature, and so should be idempotent.
"""
if not self.available:
raise DistutilsPlatformError(
self.description+" is required,"
"but is not available on this platform"
)
dist.include(**self.extras)
for f in self.require_features:
dist.include_feature(f)
def exclude_from(self,dist):
"""Ensure feature is excluded from distribution
You may override this in a subclass to perform additional operations on
the distribution. This method will be called at most once per
feature, and only after all included features have been asked to
include themselves.
"""
dist.exclude(**self.extras)
if self.remove:
for item in self.remove:
dist.exclude_package(item)
def validate(self,dist):
"""Verify that feature makes sense in context of distribution
This method is called by the distribution just before it parses its
command line. It checks to ensure that the 'remove' attribute, if any,
contains only valid package/module names that are present in the base
distribution when 'setup()' is called. You may override it in a
subclass to perform any other required validation of the feature
against a target distribution.
"""
for item in self.remove:
if not dist.has_contents_for(item):
raise DistutilsSetupError(
"%s wants to be able to remove %s, but the distribution"
" doesn't contain any packages or modules under %s"
% (self.description, item, item)
)
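A sketch of Feature in use, matching the "tests" example from the docstring; it assumes Feature and setup are importable from the setuptools package, and every project name is a placeholder:
# Sketch: an optional test-suite feature, excludable at build time.
from setuptools import setup, Feature

setup(
    name='MyProject',
    version='0.1',
    packages=['myproject', 'myproject.tests'],
    features={
        'tests': Feature(
            "the MyProject test suite",
            standard=True,                  # included unless excluded explicitly
            remove=['myproject.tests'],     # dropped entirely when excluded
        ),
    },
)
Including or excluding it from the command line is then a matter of '--with-tests' or '--without-tests'.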


@ -0,0 +1,36 @@
from distutils.core import Extension as _Extension
from dist import _get_unpatched
_Extension = _get_unpatched(_Extension)
try:
from Pyrex.Distutils.build_ext import build_ext
except ImportError:
have_pyrex = False
else:
have_pyrex = True
class Extension(_Extension):
"""Extension that uses '.c' files in place of '.pyx' files"""
if not have_pyrex:
# convert .pyx extensions to .c
def __init__(self,*args,**kw):
_Extension.__init__(self,*args,**kw)
sources = []
for s in self.sources:
if s.endswith('.pyx'):
sources.append(s[:-3]+'c')
else:
sources.append(s)
self.sources = sources
class Library(Extension):
"""Just like a regular Extension, but built as a library instead"""
import sys, distutils.core, distutils.extension
distutils.core.Extension = Extension
distutils.extension.Extension = Extension
if 'distutils.command.build_ext' in sys.modules:
sys.modules['distutils.command.build_ext'].Extension = Extension
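A sketch of the substitution in practice, assuming Extension is importable from the setuptools package; the extension and file names are placeholders:
# Sketch: with Pyrex unavailable, the .pyx source is rewritten to .c.
from setuptools import Extension

ext = Extension('myproject._speedups', ['myproject/_speedups.pyx'])
print ext.sources      # ['myproject/_speedups.c'] when Pyrex is not installed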

BIN
Lib/setuptools/gui.exe Executable file

Binary file not shown.

697
Lib/setuptools/package_index.py Executable file

@ -0,0 +1,697 @@
"""PyPI and direct package downloading"""
import sys, os.path, re, urlparse, urllib2, shutil, random, socket
from pkg_resources import *
from distutils import log
from distutils.errors import DistutilsError
from md5 import md5
from fnmatch import translate
EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
# this is here to fix emacs' cruddy broken syntax highlighting
PYPI_MD5 = re.compile(
'<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a href="[^?]+\?:action=show_md5'
'&amp;digest=([0-9a-f]{32})">md5</a>\\)'
)
URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
__all__ = [
'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
'interpret_distro_name',
]
def parse_bdist_wininst(name):
"""Return (base,pyversion) or (None,None) for possible .exe name"""
lower = name.lower()
base, py_ver = None, None
if lower.endswith('.exe'):
if lower.endswith('.win32.exe'):
base = name[:-10]
elif lower.startswith('.win32-py',-16):
py_ver = name[-7:-4]
base = name[:-16]
return base,py_ver
def egg_info_for_url(url):
scheme, server, path, parameters, query, fragment = urlparse.urlparse(url)
base = urllib2.unquote(path.split('/')[-1])
if '#' in base: base, fragment = base.split('#',1)
return base,fragment
def distros_for_url(url, metadata=None):
"""Yield egg or source distribution objects that might be found at a URL"""
base, fragment = egg_info_for_url(url)
dists = distros_for_location(url, base, metadata)
if fragment and not dists:
match = EGG_FRAGMENT.match(fragment)
if match:
return interpret_distro_name(
url, match.group(1), metadata, precedence = CHECKOUT_DIST
)
return dists
def distros_for_location(location, basename, metadata=None):
"""Yield egg or source distribution objects based on basename"""
if basename.endswith('.egg.zip'):
basename = basename[:-4] # strip the .zip
if basename.endswith('.egg'): # only one, unambiguous interpretation
return [Distribution.from_location(location, basename, metadata)]
if basename.endswith('.exe'):
win_base, py_ver = parse_bdist_wininst(basename)
if win_base is not None:
return interpret_distro_name(
location, win_base, metadata, py_ver, BINARY_DIST, "win32"
)
# Try source distro extensions (.zip, .tgz, etc.)
#
for ext in EXTENSIONS:
if basename.endswith(ext):
basename = basename[:-len(ext)]
return interpret_distro_name(location, basename, metadata)
return [] # no extension matched
def distros_for_filename(filename, metadata=None):
"""Yield possible egg or source distribution objects based on a filename"""
return distros_for_location(
normalize_path(filename), os.path.basename(filename), metadata
)
def interpret_distro_name(location, basename, metadata,
py_version=None, precedence=SOURCE_DIST, platform=None
):
"""Generate alternative interpretations of a source distro name
Note: if `location` is a filesystem filename, you should call
``pkg_resources.normalize_path()`` on it before passing it to this
routine!
"""
# Generate alternative interpretations of a source distro name
# Because some packages are ambiguous as to name/versions split
# e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
# So, we generate each possible interepretation (e.g. "adns, python-1.1.0"
# "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice,
# the spurious interpretations should be ignored, because in the event
# there's also an "adns" package, the spurious "python-1.1.0" version will
# compare lower than any numeric version number, and is therefore unlikely
# to match a request for it. It's still a potential problem, though, and
# in the long run PyPI and the distutils should go for "safe" names and
# versions in distribution archive names (sdist and bdist).
parts = basename.split('-')
for p in range(1,len(parts)+1):
yield Distribution(
location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
py_version=py_version, precedence = precedence,
platform = platform
)
class PackageIndex(Environment):
"""A distribution index that scans web pages for download URLs"""
def __init__(self,index_url="http://www.python.org/pypi",hosts=('*',),*args,**kw):
Environment.__init__(self,*args,**kw)
self.index_url = index_url + "/"[:not index_url.endswith('/')]
self.scanned_urls = {}
self.fetched_urls = {}
self.package_pages = {}
self.allows = re.compile('|'.join(map(translate,hosts))).match
self.to_scan = []
def process_url(self, url, retrieve=False):
"""Evaluate a URL as a possible download, and maybe retrieve it"""
url = fix_sf_url(url)
if url in self.scanned_urls and not retrieve:
return
self.scanned_urls[url] = True
if not URL_SCHEME(url):
self.process_filename(url)
return
else:
dists = list(distros_for_url(url))
if dists:
if not self.url_ok(url):
return
self.debug("Found link: %s", url)
if dists or not retrieve or url in self.fetched_urls:
map(self.add, dists)
return # don't need the actual page
if not self.url_ok(url):
self.fetched_urls[url] = True
return
self.info("Reading %s", url)
f = self.open_url(url)
self.fetched_urls[url] = self.fetched_urls[f.url] = True
if 'html' not in f.headers['content-type'].lower():
f.close() # not html, we can't process it
return
base = f.url # handle redirects
page = f.read()
f.close()
if url.startswith(self.index_url):
page = self.process_index(url, page)
for match in HREF.finditer(page):
link = urlparse.urljoin(base, match.group(1))
self.process_url(link)
def process_filename(self, fn, nested=False):
# process filenames or directories
if not os.path.exists(fn):
self.warn("Not found: %s", url)
return
if os.path.isdir(fn) and not nested:
path = os.path.realpath(fn)
for item in os.listdir(path):
self.process_filename(os.path.join(path,item), True)
dists = distros_for_filename(fn)
if dists:
self.debug("Found: %s", fn)
map(self.add, dists)
def url_ok(self, url, fatal=False):
if self.allows(urlparse.urlparse(url)[1]):
return True
        msg = "\nLink to %s ***BLOCKED*** by --allow-hosts\n"
if fatal:
raise DistutilsError(msg % url)
else:
self.warn(msg, url)
def process_index(self,url,page):
"""Process the contents of a PyPI page"""
def scan(link):
# Process a URL to see if it's for a package page
if link.startswith(self.index_url):
parts = map(
urllib2.unquote, link[len(self.index_url):].split('/')
)
if len(parts)==2:
# it's a package page, sanitize and index it
pkg = safe_name(parts[0])
ver = safe_version(parts[1])
self.package_pages.setdefault(pkg.lower(),{})[link] = True
return to_filename(pkg), to_filename(ver)
return None, None
if url==self.index_url or 'Index of Packages</title>' in page:
# process an index page into the package-page index
for match in HREF.finditer(page):
scan( urlparse.urljoin(url, match.group(1)) )
else:
pkg,ver = scan(url) # ensure this page is in the page index
# process individual package page
for tag in ("<th>Home Page", "<th>Download URL"):
pos = page.find(tag)
if pos!=-1:
match = HREF.search(page,pos)
if match:
# Process the found URL
new_url = urlparse.urljoin(url, match.group(1))
base, frag = egg_info_for_url(new_url)
if base.endswith('.py') and not frag:
if pkg and ver:
new_url+='#egg=%s-%s' % (pkg,ver)
else:
self.need_version_info(url)
self.scan_url(new_url)
return PYPI_MD5.sub(
lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page
)
def need_version_info(self, url):
self.scan_all(
"Page at %s links to .py file(s) without version info; an index "
"scan is required.", url
)
def scan_all(self, msg=None, *args):
if self.index_url not in self.fetched_urls:
if msg: self.warn(msg,*args)
self.warn(
"Scanning index of all packages (this may take a while)"
)
self.scan_url(self.index_url)
def find_packages(self, requirement):
self.scan_url(self.index_url + requirement.unsafe_name+'/')
if not self.package_pages.get(requirement.key):
# Fall back to safe version of the name
self.scan_url(self.index_url + requirement.project_name+'/')
if not self.package_pages.get(requirement.key):
# We couldn't find the target package, so search the index page too
self.warn(
"Couldn't find index page for %r (maybe misspelled?)",
requirement.unsafe_name
)
self.scan_all()
for url in self.package_pages.get(requirement.key,()):
# scan each page that might be related to the desired package
self.scan_url(url)
def obtain(self, requirement, installer=None):
self.prescan(); self.find_packages(requirement)
for dist in self[requirement.key]:
if dist in requirement:
return dist
self.debug("%s does not match %s", requirement, dist)
return super(PackageIndex, self).obtain(requirement,installer)
def check_md5(self, cs, info, filename, tfp):
if re.match('md5=[0-9a-f]{32}$', info):
self.debug("Validating md5 checksum for %s", filename)
if cs.hexdigest()<>info[4:]:
tfp.close()
os.unlink(filename)
raise DistutilsError(
"MD5 validation failed for "+os.path.basename(filename)+
"; possible download problem?"
)
def add_find_links(self, urls):
"""Add `urls` to the list that will be prescanned for searches"""
for url in urls:
if (
self.to_scan is None # if we have already "gone online"
or not URL_SCHEME(url) # or it's a local file/directory
or url.startswith('file:')
or list(distros_for_url(url)) # or a direct package link
):
# then go ahead and process it now
self.scan_url(url)
else:
# otherwise, defer retrieval till later
self.to_scan.append(url)
def prescan(self):
"""Scan urls scheduled for prescanning (e.g. --find-links)"""
if self.to_scan:
map(self.scan_url, self.to_scan)
self.to_scan = None # from now on, go ahead and process immediately
def download(self, spec, tmpdir):
"""Locate and/or download `spec` to `tmpdir`, returning a local path
`spec` may be a ``Requirement`` object, or a string containing a URL,
an existing local filename, or a project/version requirement spec
(i.e. the string form of a ``Requirement`` object). If it is the URL
of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
automatically created alongside the downloaded file.
If `spec` is a ``Requirement`` object or a string containing a
project/version requirement spec, this method returns the location of
a matching distribution (possibly after downloading it to `tmpdir`).
If `spec` is a locally existing file or directory name, it is simply
returned unchanged. If `spec` is a URL, it is downloaded to a subpath
of `tmpdir`, and the local filename is returned. Various errors may be
raised if a problem occurs during downloading.
"""
if not isinstance(spec,Requirement):
scheme = URL_SCHEME(spec)
if scheme:
# It's a url, download it to tmpdir
found = self._download_url(scheme.group(1), spec, tmpdir)
base, fragment = egg_info_for_url(spec)
if base.endswith('.py'):
found = self.gen_setup(found,fragment,tmpdir)
return found
elif os.path.exists(spec):
# Existing file or directory, just return it
return spec
else:
try:
spec = Requirement.parse(spec)
except ValueError:
raise DistutilsError(
"Not a URL, existing file, or requirement spec: %r" %
(spec,)
)
return getattr(self.fetch_distribution(spec, tmpdir),'location',None)
def fetch_distribution(self,
requirement, tmpdir, force_scan=False, source=False, develop_ok=False
):
"""Obtain a distribution suitable for fulfilling `requirement`
`requirement` must be a ``pkg_resources.Requirement`` instance.
If necessary, or if the `force_scan` flag is set, the requirement is
searched for in the (online) package index as well as the locally
installed packages. If a distribution matching `requirement` is found,
the returned distribution's ``location`` is the value you would have
gotten from calling the ``download()`` method with the matching
distribution's URL or filename. If no matching distribution is found,
``None`` is returned.
If the `source` flag is set, only source distributions and source
checkout links will be considered. Unless the `develop_ok` flag is
set, development and system eggs (i.e., those using the ``.egg-info``
format) will be ignored.
"""
# process a Requirement
self.info("Searching for %s", requirement)
skipped = {}
def find(req):
# Find a matching distribution; may be called more than once
for dist in self[req.key]:
if dist.precedence==DEVELOP_DIST and not develop_ok:
if dist not in skipped:
self.warn("Skipping development or system egg: %s",dist)
skipped[dist] = 1
continue
if dist in req and (dist.precedence<=SOURCE_DIST or not source):
self.info("Best match: %s", dist)
return dist.clone(
location=self.download(dist.location, tmpdir)
)
if force_scan:
self.prescan()
self.find_packages(requirement)
dist = find(requirement)
if dist is None and self.to_scan is not None:
self.prescan()
dist = find(requirement)
if dist is None and not force_scan:
self.find_packages(requirement)
dist = find(requirement)
if dist is None:
self.warn(
"No local packages or download links found for %s%s",
(source and "a source distribution of " or ""),
requirement,
)
return dist
def fetch(self, requirement, tmpdir, force_scan=False, source=False):
"""Obtain a file suitable for fulfilling `requirement`
DEPRECATED; use the ``fetch_distribution()`` method now instead. For
backward compatibility, this routine is identical but returns the
``location`` of the downloaded distribution instead of a distribution
object.
"""
dist = self.fetch_distribution(requirement,tmpdir,force_scan,source)
if dist is not None:
return dist.location
return None
def gen_setup(self, filename, fragment, tmpdir):
        match = EGG_FRAGMENT.match(fragment)
dists = match and [d for d in
interpret_distro_name(filename, match.group(1), None) if d.version
] or []
if len(dists)==1: # unambiguous ``#egg`` fragment
basename = os.path.basename(filename)
# Make sure the file has been downloaded to the temp dir.
if os.path.dirname(filename) != tmpdir:
dst = os.path.join(tmpdir, basename)
from setuptools.command.easy_install import samefile
if not samefile(filename, dst):
shutil.copy2(filename, dst)
filename=dst
file = open(os.path.join(tmpdir, 'setup.py'), 'w')
file.write(
"from setuptools import setup\n"
"setup(name=%r, version=%r, py_modules=[%r])\n"
% (
dists[0].project_name, dists[0].version,
os.path.splitext(basename)[0]
)
)
file.close()
return filename
elif match:
raise DistutilsError(
"Can't unambiguously interpret project/version identifier %r; "
"any dashes in the name or version should be escaped using "
"underscores. %r" % (fragment,dists)
)
else:
raise DistutilsError(
"Can't process plain .py files without an '#egg=name-version'"
" suffix to enable automatic setup script generation."
)
dl_blocksize = 8192
def _download_to(self, url, filename):
self.url_ok(url,True) # raises error if not allowed
self.info("Downloading %s", url)
# Download the file
fp, tfp, info = None, None, None
try:
if '#' in url:
url, info = url.split('#', 1)
fp = self.open_url(url)
if isinstance(fp, urllib2.HTTPError):
raise DistutilsError(
"Can't download %s: %s %s" % (url, fp.code,fp.msg)
)
cs = md5()
headers = fp.info()
blocknum = 0
bs = self.dl_blocksize
size = -1
if "content-length" in headers:
size = int(headers["Content-Length"])
self.reporthook(url, filename, blocknum, bs, size)
tfp = open(filename,'wb')
while True:
block = fp.read(bs)
if block:
cs.update(block)
tfp.write(block)
blocknum += 1
self.reporthook(url, filename, blocknum, bs, size)
else:
break
if info: self.check_md5(cs, info, filename, tfp)
return headers
finally:
if fp: fp.close()
if tfp: tfp.close()
def reporthook(self, url, filename, blocknum, blksize, size):
pass # no-op
def retry_sf_download(self, url, filename):
try:
return self._download_to(url, filename)
except:
scheme, server, path, param, query, frag = urlparse.urlparse(url)
if server!='dl.sourceforge.net':
raise
mirror = get_sf_ip()
while _sf_mirrors:
self.warn("Download failed: %s", sys.exc_info()[1])
url = urlparse.urlunparse((scheme, mirror, path, param, '', frag))
try:
return self._download_to(url, filename)
except:
_sf_mirrors.remove(mirror) # don't retry the same mirror
mirror = get_sf_ip()
raise # fail if no mirror works
def open_url(self, url):
try:
return urllib2.urlopen(url)
except urllib2.HTTPError, v:
return v
except urllib2.URLError, v:
raise DistutilsError("Download error: %s" % v.reason)
def _download_url(self, scheme, url, tmpdir):
# Determine download filename
#
name = filter(None,urlparse.urlparse(url)[2].split('/'))
if name:
name = name[-1]
while '..' in name:
name = name.replace('..','.').replace('\\','_')
else:
name = "__downloaded__" # default if URL has no path contents
if name.endswith('.egg.zip'):
name = name[:-4] # strip the extra .zip before download
filename = os.path.join(tmpdir,name)
# Download the file
#
if scheme=='svn' or scheme.startswith('svn+'):
return self._download_svn(url, filename)
else:
headers = self.retry_sf_download(url, filename)
if 'html' in headers['content-type'].lower():
return self._download_html(url, headers, filename, tmpdir)
else:
return filename
def scan_url(self, url):
self.process_url(url, True)
def _download_html(self, url, headers, filename, tmpdir):
file = open(filename)
for line in file:
if line.strip():
# Check for a subversion index page
if re.search(r'<title>Revision \d+:', line):
# it's a subversion index page:
file.close()
os.unlink(filename)
return self._download_svn(url, filename)
break # not an index page
file.close()
os.unlink(filename)
raise DistutilsError("Unexpected HTML page found at "+url)
def _download_svn(self, url, filename):
url = url.split('#',1)[0] # remove any fragment for svn's sake
self.info("Doing subversion checkout from %s to %s", url, filename)
os.system("svn checkout -q %s %s" % (url, filename))
return filename
def debug(self, msg, *args):
log.debug(msg, *args)
def info(self, msg, *args):
log.info(msg, *args)
def warn(self, msg, *args):
log.warn(msg, *args)
def fix_sf_url(url):
scheme, server, path, param, query, frag = urlparse.urlparse(url)
if server!='prdownloads.sourceforge.net':
return url
return urlparse.urlunparse(
(scheme, 'dl.sourceforge.net', 'sourceforge'+path, param, '', frag)
)
_sf_mirrors = []
def get_sf_ip():
if not _sf_mirrors:
try:
_sf_mirrors[:] = socket.gethostbyname_ex('dl.sourceforge.net')[-1]
except socket.error:
            # DNS lookup failed or is blocked (e.g. by a firewall);
            # fall back to the canonical hostname
_sf_mirrors[:] = ['dl.sourceforge.net']
return random.choice(_sf_mirrors)
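
A rough usage sketch (editor's illustration; it assumes network access to
PyPI, and "SomeProject" is a made-up name): the loop shows the candidate
name/version splits generated for an ambiguous archive basename, and
``fetch_distribution()`` then resolves a Requirement to a downloadable match.

    # Sketch only: requires network access; project name is hypothetical.
    import tempfile
    from pkg_resources import Requirement
    from setuptools.package_index import PackageIndex, interpret_distro_name

    # One candidate Distribution per possible name/version split:
    for d in interpret_distro_name('http://example.com/', 'adns-python-1.1.0', None):
        print d.project_name, '|', d.version

    index = PackageIndex()                    # scans python.org/pypi by default
    tmpdir = tempfile.mkdtemp()
    req = Requirement.parse("SomeProject>=1.0")
    dist = index.fetch_distribution(req, tmpdir, force_scan=True, source=True)
    if dist is not None:
        print "downloaded to", dist.location
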

205
Lib/setuptools/sandbox.py Executable file
View File

@ -0,0 +1,205 @@
import os, sys, __builtin__, tempfile
_os = sys.modules[os.name]
_open = open
from distutils.errors import DistutilsError
__all__ = [
"AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
]
def run_setup(setup_script, args):
"""Run a distutils setup script, sandboxed in its directory"""
old_dir = os.getcwd()
save_argv = sys.argv[:]
save_path = sys.path[:]
setup_dir = os.path.abspath(os.path.dirname(setup_script))
temp_dir = os.path.join(setup_dir,'temp')
if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
save_tmp = tempfile.tempdir
try:
tempfile.tempdir = temp_dir
os.chdir(setup_dir)
try:
sys.argv[:] = [setup_script]+list(args)
sys.path.insert(0, setup_dir)
DirectorySandbox(setup_dir).run(
lambda: execfile(
"setup.py",
{'__file__':setup_script, '__name__':'__main__'}
)
)
except SystemExit, v:
if v.args and v.args[0]:
raise
# Normal exit, just return
finally:
os.chdir(old_dir)
sys.path[:] = save_path
sys.argv[:] = save_argv
tempfile.tempdir = save_tmp
class AbstractSandbox:
"""Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""
_active = False
def __init__(self):
self._attrs = [
name for name in dir(_os)
if not name.startswith('_') and hasattr(self,name)
]
def _copy(self, source):
for name in self._attrs:
setattr(os, name, getattr(source,name))
def run(self, func):
"""Run 'func' under os sandboxing"""
try:
self._copy(self)
__builtin__.open = __builtin__.file = self._open
self._active = True
return func()
finally:
self._active = False
__builtin__.open = __builtin__.file = _open
self._copy(_os)
def _mk_dual_path_wrapper(name):
original = getattr(_os,name)
def wrap(self,src,dst,*args,**kw):
if self._active:
src,dst = self._remap_pair(name,src,dst,*args,**kw)
return original(src,dst,*args,**kw)
return wrap
for name in ["rename", "link", "symlink"]:
if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name)
def _mk_single_path_wrapper(name, original=None):
original = original or getattr(_os,name)
def wrap(self,path,*args,**kw):
if self._active:
path = self._remap_input(name,path,*args,**kw)
return original(path,*args,**kw)
return wrap
_open = _mk_single_path_wrapper('file', _open)
for name in [
"stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
"remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
"startfile", "mkfifo", "mknod", "pathconf", "access"
]:
if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name)
def _mk_single_with_return(name):
original = getattr(_os,name)
def wrap(self,path,*args,**kw):
if self._active:
path = self._remap_input(name,path,*args,**kw)
return self._remap_output(name, original(path,*args,**kw))
return original(path,*args,**kw)
return wrap
for name in ['readlink', 'tempnam']:
if hasattr(_os,name): locals()[name] = _mk_single_with_return(name)
def _mk_query(name):
original = getattr(_os,name)
def wrap(self,*args,**kw):
retval = original(*args,**kw)
if self._active:
return self._remap_output(name, retval)
return retval
return wrap
for name in ['getcwd', 'tmpnam']:
if hasattr(_os,name): locals()[name] = _mk_query(name)
def _validate_path(self,path):
"""Called to remap or validate any path, whether input or output"""
return path
def _remap_input(self,operation,path,*args,**kw):
"""Called for path inputs"""
return self._validate_path(path)
def _remap_output(self,operation,path):
"""Called for path outputs"""
return self._validate_path(path)
def _remap_pair(self,operation,src,dst,*args,**kw):
"""Called for path pairs like rename, link, and symlink operations"""
return (
self._remap_input(operation+'-from',src,*args,**kw),
self._remap_input(operation+'-to',dst,*args,**kw)
)
class DirectorySandbox(AbstractSandbox):
"""Restrict operations to a single subdirectory - pseudo-chroot"""
write_ops = dict.fromkeys([
"open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
"utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
])
def __init__(self,sandbox):
self._sandbox = os.path.normcase(os.path.realpath(sandbox))
self._prefix = os.path.join(self._sandbox,'')
AbstractSandbox.__init__(self)
def _violation(self, operation, *args, **kw):
raise SandboxViolation(operation, args, kw)
def _open(self, path, mode='r', *args, **kw):
if mode not in ('r', 'rt', 'rb', 'rU') and not self._ok(path):
self._violation("open", path, mode, *args, **kw)
return _open(path,mode,*args,**kw)
def tmpnam(self):
self._violation("tmpnam")
def _ok(self,path):
active = self._active
try:
self._active = False
realpath = os.path.normcase(os.path.realpath(path))
if realpath==self._sandbox or realpath.startswith(self._prefix):
return True
finally:
self._active = active
def _remap_input(self,operation,path,*args,**kw):
"""Called for path inputs"""
if operation in self.write_ops and not self._ok(path):
self._violation(operation, os.path.realpath(path), *args, **kw)
return path
def _remap_pair(self,operation,src,dst,*args,**kw):
"""Called for path pairs like rename, link, and symlink operations"""
if not self._ok(src) or not self._ok(dst):
self._violation(operation, src, dst, *args, **kw)
return (src,dst)
class SandboxViolation(DistutilsError):
"""A setup script attempted to modify the filesystem outside the sandbox"""
def __str__(self):
return """SandboxViolation: %s%r %s
The package setup script has attempted to modify files on your system
that are not within the EasyInstall build area, and has been aborted.
This package cannot be safely installed by EasyInstall, and may not
support alternate installation locations even if you run its setup
script by hand. Please inform the package's author and the EasyInstall
maintainers to find out if a fix or workaround is available.""" % self.args
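
A minimal sketch (editor's illustration): while ``run()`` is active, the
wrapped ``open()`` and ``os`` calls refuse writes outside the sandboxed
directory and raise ``SandboxViolation``.

    # Sketch only: paths are created in the system temp directory.
    import os, tempfile
    from setuptools.sandbox import DirectorySandbox, SandboxViolation

    workdir = tempfile.mkdtemp()
    outside = os.path.join(tempfile.gettempdir(), "escape.txt")  # not under workdir

    def misbehave():
        open(outside, "w")        # routed through the sandboxed open()

    try:
        DirectorySandbox(workdir).run(misbehave)
    except SandboxViolation, v:
        print "blocked operation:", v.args[0]   # 'open'
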

82
Lib/setuptools/site-patch.py Executable file
View File

@ -0,0 +1,82 @@
def __boot():
import sys, imp, os, os.path
PYTHONPATH = os.environ.get('PYTHONPATH')
if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
PYTHONPATH = []
else:
PYTHONPATH = PYTHONPATH.split(os.pathsep)
pic = getattr(sys,'path_importer_cache',{})
stdpath = sys.path[len(PYTHONPATH):]
mydir = os.path.dirname(__file__)
#print "searching",stdpath,sys.path
for item in stdpath:
if item==mydir or not item:
continue # skip if current dir. on Windows, or my own directory
importer = pic.get(item)
if importer is not None:
loader = importer.find_module('site')
if loader is not None:
# This should actually reload the current module
loader.load_module('site')
break
else:
try:
stream, path, descr = imp.find_module('site',[item])
except ImportError:
continue
if stream is None:
continue
try:
# This should actually reload the current module
imp.load_module('site',stream,path,descr)
finally:
stream.close()
break
else:
raise ImportError("Couldn't find the real 'site' module")
#print "loaded", __file__
known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp
oldpos = getattr(sys,'__egginsert',0) # save old insertion position
sys.__egginsert = 0 # and reset the current one
for item in PYTHONPATH:
addsitedir(item)
sys.__egginsert += oldpos # restore effective old position
d,nd = makepath(stdpath[0])
insert_at = None
new_path = []
for item in sys.path:
p,np = makepath(item)
if np==nd and insert_at is None:
# We've hit the first 'system' path entry, so added entries go here
insert_at = len(new_path)
if np in known_paths or insert_at is None:
new_path.append(item)
else:
# new path after the insert point, back-insert it
new_path.insert(insert_at, item)
insert_at += 1
sys.path[:] = new_path
if __name__=='site':
__boot()
del __boot

View File

@ -0,0 +1,369 @@
"""Tests for the 'setuptools' package"""
from unittest import TestSuite, TestCase, makeSuite, defaultTestLoader
import distutils.core, distutils.cmd
from distutils.errors import DistutilsOptionError, DistutilsPlatformError
from distutils.errors import DistutilsSetupError
import setuptools, setuptools.dist
from setuptools import Feature
from distutils.core import Extension
from setuptools.depends import extract_constant, get_module_constant
from setuptools.depends import find_module, Require
from distutils.version import StrictVersion, LooseVersion
from distutils.util import convert_path
import sys, os.path
def additional_tests():
import doctest
return doctest.DocFileSuite(
'api_tests.txt', optionflags=doctest.ELLIPSIS, package=__name__,
)
def makeSetup(**args):
"""Return distribution from 'setup(**args)', without executing commands"""
distutils.core._setup_stop_after = "commandline"
# Don't let system command line leak into tests!
args.setdefault('script_args',['install'])
try:
return setuptools.setup(**args)
finally:
        distutils.core._setup_stop_after = None
class DependsTests(TestCase):
def testExtractConst(self):
from setuptools.depends import extract_constant
def f1():
global x,y,z
x = "test"
y = z
# unrecognized name
self.assertEqual(extract_constant(f1.func_code,'q', -1), None)
# constant assigned
self.assertEqual(extract_constant(f1.func_code,'x', -1), "test")
# expression assigned
self.assertEqual(extract_constant(f1.func_code,'y', -1), -1)
# recognized name, not assigned
self.assertEqual(extract_constant(f1.func_code,'z', -1), None)
def testFindModule(self):
self.assertRaises(ImportError, find_module, 'no-such.-thing')
self.assertRaises(ImportError, find_module, 'setuptools.non-existent')
f,p,i = find_module('setuptools.tests'); f.close()
def testModuleExtract(self):
from distutils import __version__
self.assertEqual(
get_module_constant('distutils','__version__'), __version__
)
self.assertEqual(
get_module_constant('sys','version'), sys.version
)
self.assertEqual(
get_module_constant('setuptools.tests','__doc__'),__doc__
)
def testRequire(self):
req = Require('Distutils','1.0.3','distutils')
self.assertEqual(req.name, 'Distutils')
self.assertEqual(req.module, 'distutils')
self.assertEqual(req.requested_version, '1.0.3')
self.assertEqual(req.attribute, '__version__')
self.assertEqual(req.full_name(), 'Distutils-1.0.3')
from distutils import __version__
self.assertEqual(req.get_version(), __version__)
self.failUnless(req.version_ok('1.0.9'))
self.failIf(req.version_ok('0.9.1'))
self.failIf(req.version_ok('unknown'))
self.failUnless(req.is_present())
self.failUnless(req.is_current())
req = Require('Distutils 3000','03000','distutils',format=LooseVersion)
self.failUnless(req.is_present())
self.failIf(req.is_current())
self.failIf(req.version_ok('unknown'))
req = Require('Do-what-I-mean','1.0','d-w-i-m')
self.failIf(req.is_present())
self.failIf(req.is_current())
req = Require('Tests', None, 'tests', homepage="http://example.com")
self.assertEqual(req.format, None)
self.assertEqual(req.attribute, None)
self.assertEqual(req.requested_version, None)
self.assertEqual(req.full_name(), 'Tests')
self.assertEqual(req.homepage, 'http://example.com')
paths = [os.path.dirname(p) for p in __path__]
self.failUnless(req.is_present(paths))
self.failUnless(req.is_current(paths))
class DistroTests(TestCase):
def setUp(self):
self.e1 = Extension('bar.ext',['bar.c'])
self.e2 = Extension('c.y', ['y.c'])
self.dist = makeSetup(
packages=['a', 'a.b', 'a.b.c', 'b', 'c'],
py_modules=['b.d','x'],
ext_modules = (self.e1, self.e2),
package_dir = {},
)
def testDistroType(self):
self.failUnless(isinstance(self.dist,setuptools.dist.Distribution))
def testExcludePackage(self):
self.dist.exclude_package('a')
self.assertEqual(self.dist.packages, ['b','c'])
self.dist.exclude_package('b')
self.assertEqual(self.dist.packages, ['c'])
self.assertEqual(self.dist.py_modules, ['x'])
self.assertEqual(self.dist.ext_modules, [self.e1, self.e2])
self.dist.exclude_package('c')
self.assertEqual(self.dist.packages, [])
self.assertEqual(self.dist.py_modules, ['x'])
self.assertEqual(self.dist.ext_modules, [self.e1])
# test removals from unspecified options
makeSetup().exclude_package('x')
def testIncludeExclude(self):
# remove an extension
self.dist.exclude(ext_modules=[self.e1])
self.assertEqual(self.dist.ext_modules, [self.e2])
# add it back in
self.dist.include(ext_modules=[self.e1])
self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
# should not add duplicate
self.dist.include(ext_modules=[self.e1])
self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
def testExcludePackages(self):
self.dist.exclude(packages=['c','b','a'])
self.assertEqual(self.dist.packages, [])
self.assertEqual(self.dist.py_modules, ['x'])
self.assertEqual(self.dist.ext_modules, [self.e1])
def testEmpty(self):
dist = makeSetup()
dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
dist = makeSetup()
dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
def testContents(self):
self.failUnless(self.dist.has_contents_for('a'))
self.dist.exclude_package('a')
self.failIf(self.dist.has_contents_for('a'))
self.failUnless(self.dist.has_contents_for('b'))
self.dist.exclude_package('b')
self.failIf(self.dist.has_contents_for('b'))
self.failUnless(self.dist.has_contents_for('c'))
self.dist.exclude_package('c')
self.failIf(self.dist.has_contents_for('c'))
def testInvalidIncludeExclude(self):
self.assertRaises(DistutilsSetupError,
self.dist.include, nonexistent_option='x'
)
self.assertRaises(DistutilsSetupError,
self.dist.exclude, nonexistent_option='x'
)
self.assertRaises(DistutilsSetupError,
self.dist.include, packages={'x':'y'}
)
self.assertRaises(DistutilsSetupError,
self.dist.exclude, packages={'x':'y'}
)
self.assertRaises(DistutilsSetupError,
self.dist.include, ext_modules={'x':'y'}
)
self.assertRaises(DistutilsSetupError,
self.dist.exclude, ext_modules={'x':'y'}
)
self.assertRaises(DistutilsSetupError,
self.dist.include, package_dir=['q']
)
self.assertRaises(DistutilsSetupError,
self.dist.exclude, package_dir=['q']
)
class FeatureTests(TestCase):
def setUp(self):
self.req = Require('Distutils','1.0.3','distutils')
self.dist = makeSetup(
features={
'foo': Feature("foo",standard=True,require_features=['baz',self.req]),
'bar': Feature("bar", standard=True, packages=['pkg.bar'],
py_modules=['bar_et'], remove=['bar.ext'],
),
'baz': Feature(
"baz", optional=False, packages=['pkg.baz'],
scripts = ['scripts/baz_it'],
libraries=[('libfoo','foo/foofoo.c')]
),
'dwim': Feature("DWIM", available=False, remove='bazish'),
},
script_args=['--without-bar', 'install'],
packages = ['pkg.bar', 'pkg.foo'],
py_modules = ['bar_et', 'bazish'],
ext_modules = [Extension('bar.ext',['bar.c'])]
)
def testDefaults(self):
self.failIf(
Feature(
"test",standard=True,remove='x',available=False
).include_by_default()
)
self.failUnless(
Feature("test",standard=True,remove='x').include_by_default()
)
# Feature must have either kwargs, removes, or require_features
self.assertRaises(DistutilsSetupError, Feature, "test")
def testAvailability(self):
self.assertRaises(
DistutilsPlatformError,
self.dist.features['dwim'].include_in, self.dist
)
def testFeatureOptions(self):
dist = self.dist
self.failUnless(
('with-dwim',None,'include DWIM') in dist.feature_options
)
self.failUnless(
('without-dwim',None,'exclude DWIM (default)') in dist.feature_options
)
self.failUnless(
('with-bar',None,'include bar (default)') in dist.feature_options
)
self.failUnless(
('without-bar',None,'exclude bar') in dist.feature_options
)
self.assertEqual(dist.feature_negopt['without-foo'],'with-foo')
self.assertEqual(dist.feature_negopt['without-bar'],'with-bar')
self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim')
self.failIf('without-baz' in dist.feature_negopt)
def testUseFeatures(self):
dist = self.dist
self.assertEqual(dist.with_foo,1)
self.assertEqual(dist.with_bar,0)
self.assertEqual(dist.with_baz,1)
self.failIf('bar_et' in dist.py_modules)
self.failIf('pkg.bar' in dist.packages)
self.failUnless('pkg.baz' in dist.packages)
self.failUnless('scripts/baz_it' in dist.scripts)
self.failUnless(('libfoo','foo/foofoo.c') in dist.libraries)
self.assertEqual(dist.ext_modules,[])
self.assertEqual(dist.require_features, [self.req])
# If we ask for bar, it should fail because we explicitly disabled
# it on the command line
self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar')
def testFeatureWithInvalidRemove(self):
self.assertRaises(
SystemExit, makeSetup, features = {'x':Feature('x', remove='y')}
)
class TestCommandTests(TestCase):
def testTestIsCommand(self):
test_cmd = makeSetup().get_command_obj('test')
self.failUnless(isinstance(test_cmd, distutils.cmd.Command))
def testLongOptSuiteWNoDefault(self):
ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite'])
ts1 = ts1.get_command_obj('test')
ts1.ensure_finalized()
self.assertEqual(ts1.test_suite, 'foo.tests.suite')
def testDefaultSuite(self):
ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
ts2.ensure_finalized()
self.assertEqual(ts2.test_suite, 'bar.tests.suite')
def testDefaultWModuleOnCmdLine(self):
ts3 = makeSetup(
test_suite='bar.tests',
script_args=['test','-m','foo.tests']
).get_command_obj('test')
ts3.ensure_finalized()
self.assertEqual(ts3.test_module, 'foo.tests')
self.assertEqual(ts3.test_suite, 'foo.tests.test_suite')
def testConflictingOptions(self):
ts4 = makeSetup(
script_args=['test','-m','bar.tests', '-s','foo.tests.suite']
).get_command_obj('test')
self.assertRaises(DistutilsOptionError, ts4.ensure_finalized)
def testNoSuite(self):
ts5 = makeSetup().get_command_obj('test')
ts5.ensure_finalized()
self.assertEqual(ts5.test_suite, None)

View File

@ -0,0 +1,330 @@
Pluggable Distributions of Python Software
==========================================
Distributions
-------------
A "Distribution" is a collection of files that represent a "Release" of a
"Project" as of a particular point in time, denoted by a
"Version"::
>>> import sys, pkg_resources
>>> from pkg_resources import Distribution
>>> Distribution(project_name="Foo", version="1.2")
Foo 1.2
Distributions have a location, which can be a filename, URL, or really anything
else you care to use::
>>> dist = Distribution(
... location="http://example.com/something",
... project_name="Bar", version="0.9"
... )
>>> dist
Bar 0.9 (http://example.com/something)
Distributions have various introspectable attributes::
>>> dist.location
'http://example.com/something'
>>> dist.project_name
'Bar'
>>> dist.version
'0.9'
>>> dist.py_version == sys.version[:3]
True
>>> print dist.platform
None
Including various computed attributes::
>>> from pkg_resources import parse_version
>>> dist.parsed_version == parse_version(dist.version)
True
>>> dist.key # case-insensitive form of the project name
'bar'
Distributions are compared (and hashed) by version first::
>>> Distribution(version='1.0') == Distribution(version='1.0')
True
>>> Distribution(version='1.0') == Distribution(version='1.1')
False
>>> Distribution(version='1.0') < Distribution(version='1.1')
True
but also by project name (case-insensitive), platform, Python version,
location, etc.::
>>> Distribution(project_name="Foo",version="1.0") == \
... Distribution(project_name="Foo",version="1.0")
True
>>> Distribution(project_name="Foo",version="1.0") == \
... Distribution(project_name="foo",version="1.0")
True
>>> Distribution(project_name="Foo",version="1.0") == \
... Distribution(project_name="Foo",version="1.1")
False
>>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \
... Distribution(project_name="Foo",py_version="2.4",version="1.0")
False
>>> Distribution(location="spam",version="1.0") == \
... Distribution(location="spam",version="1.0")
True
>>> Distribution(location="spam",version="1.0") == \
... Distribution(location="baz",version="1.0")
False
Hash and compare distribution by prio/plat
Get version from metadata
provider capabilities
egg_name()
as_requirement()
from_location, from_filename (w/path normalization)
Releases may have zero or more "Requirements", which indicate
what releases of another project the release requires in order to
function. A Requirement names the other project, expresses some criteria
as to what releases of that project are acceptable, and lists any "Extras"
that the requiring release may need from that project. (An Extra is an
optional feature of a Release, that can only be used if its additional
Requirements are satisfied.)
The Working Set
---------------
A collection of active distributions is called a Working Set. Note that a
Working Set can contain any importable distribution, not just pluggable ones.
For example, the Python standard library is an importable distribution that
will usually be part of the Working Set, even though it is not pluggable.
Similarly, when you are doing development work on a project, the files you are
editing are also a Distribution. (And, with a little attention to the
directory names used, and including some additional metadata, such a
"development distribution" can be made pluggable as well.)
>>> from pkg_resources import WorkingSet
A working set's entries are the sys.path entries that correspond to the active
distributions. By default, the working set's entries are the items on
``sys.path``::
>>> ws = WorkingSet()
>>> ws.entries == sys.path
True
But you can also create an empty working set explicitly, and add distributions
to it::
>>> ws = WorkingSet([])
>>> ws.add(dist)
>>> ws.entries
['http://example.com/something']
>>> dist in ws
True
>>> Distribution('foo',version="") in ws
False
And you can iterate over its distributions::
>>> list(ws)
[Bar 0.9 (http://example.com/something)]
Adding the same distribution more than once is a no-op::
>>> ws.add(dist)
>>> list(ws)
[Bar 0.9 (http://example.com/something)]
For that matter, adding multiple distributions for the same project also does
nothing, because a working set can only hold one active distribution per
project -- the first one added to it::
>>> ws.add(
... Distribution(
... 'http://example.com/something', project_name="Bar",
... version="7.2"
... )
... )
>>> list(ws)
[Bar 0.9 (http://example.com/something)]
You can append a path entry to a working set using ``add_entry()``::
>>> ws.entries
['http://example.com/something']
>>> ws.add_entry(pkg_resources.__file__)
>>> ws.entries
['http://example.com/something', '...pkg_resources.py...']
Multiple additions result in multiple entries, even if the entry is already in
the working set (because ``sys.path`` can contain the same entry more than
once)::
>>> ws.add_entry(pkg_resources.__file__)
>>> ws.entries
['...example.com...', '...pkg_resources...', '...pkg_resources...']
And you can specify the path entry a distribution was found under, using the
optional second parameter to ``add()``::
>>> ws = WorkingSet([])
>>> ws.add(dist,"foo")
>>> ws.entries
['foo']
But even if a distribution is found under multiple path entries, it still only
shows up once when iterating the working set:
>>> ws.add_entry(ws.entries[0])
>>> list(ws)
[Bar 0.9 (http://example.com/something)]
You can ask a WorkingSet to ``find()`` a distribution matching a requirement::
>>> from pkg_resources import Requirement
>>> print ws.find(Requirement.parse("Foo==1.0")) # no match, return None
None
>>> ws.find(Requirement.parse("Bar==0.9")) # match, return distribution
Bar 0.9 (http://example.com/something)
Note that asking for a conflicting version of a distribution already in a
working set triggers a ``pkg_resources.VersionConflict`` error:
>>> ws.find(Requirement.parse("Bar==1.0")) # doctest: +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
VersionConflict: (Bar 0.9 (http://example.com/something),
Requirement.parse('Bar==1.0'))
You can subscribe a callback function to receive notifications whenever a new
distribution is added to a working set. The callback is immediately invoked
once for each existing distribution in the working set, and then is called
again for new distributions added thereafter::
>>> def added(dist): print "Added", dist
>>> ws.subscribe(added)
Added Bar 0.9
>>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12")
>>> ws.add(foo12)
Added Foo 1.2
Note, however, that only the first distribution added for a given project name
will trigger a callback, even during the initial ``subscribe()`` callback::
>>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14")
>>> ws.add(foo14) # no callback, because Foo 1.2 is already active
>>> ws = WorkingSet([])
>>> ws.add(foo12)
>>> ws.add(foo14)
>>> ws.subscribe(added)
Added Foo 1.2
And adding a callback more than once has no effect, either::
>>> ws.subscribe(added) # no callbacks
# and no double-callbacks on subsequent additions, either
>>> just_a_test = Distribution(project_name="JustATest", version="0.99")
>>> ws.add(just_a_test)
Added JustATest 0.99
Finding Plugins
---------------
``WorkingSet`` objects can be used to figure out what plugins in an
``Environment`` can be loaded without any resolution errors::
>>> from pkg_resources import Environment
>>> plugins = Environment([]) # normally, a list of plugin directories
>>> plugins.add(foo12)
>>> plugins.add(foo14)
>>> plugins.add(just_a_test)
In the simplest case, we just get the newest version of each distribution in
the plugin environment::
>>> ws = WorkingSet([])
>>> ws.find_plugins(plugins)
([JustATest 0.99, Foo 1.4 (f14)], {})
But if there's a problem with a version conflict or missing requirements, the
method falls back to older versions, and the error info dict will contain an
exception instance for each unloadable plugin::
>>> ws.add(foo12) # this will conflict with Foo 1.4
>>> ws.find_plugins(plugins)
([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)})
But if you disallow fallbacks, the failed plugin will be skipped instead of
trying older versions::
>>> ws.find_plugins(plugins, fallback=False)
([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)})
Platform Compatibility Rules
----------------------------
On the Mac, there are potential compatibility issues for modules compiled
on newer versions of Mac OS X than what the user is running. Additionally,
Mac OS X will soon have two platforms to contend with: Intel and PowerPC.
Basic equality works as on other platforms::
>>> from pkg_resources import compatible_platforms as cp
>>> reqd = 'macosx-10.4-ppc'
>>> cp(reqd, reqd)
True
>>> cp("win32", reqd)
False
Distributions made on other machine types are not compatible::
>>> cp("macosx-10.4-i386", reqd)
False
Distributions made on earlier versions of the OS are compatible, as
long as they are from the same top-level version. The patchlevel version
number does not matter::
>>> cp("macosx-10.4-ppc", reqd)
True
>>> cp("macosx-10.3-ppc", reqd)
True
>>> cp("macosx-10.5-ppc", reqd)
False
>>> cp("macosx-9.5-ppc", reqd)
False
Backwards compatibility for packages made via earlier versions of
setuptools is provided as well::
>>> cp("darwin-8.2.0-Power_Macintosh", reqd)
True
>>> cp("darwin-7.2.0-Power_Macintosh", reqd)
True
>>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc")
False

View File

@ -0,0 +1,492 @@
from unittest import TestCase, makeSuite
from pkg_resources import *
import pkg_resources, sys
from sets import ImmutableSet
class Metadata(EmptyProvider):
"""Mock object to return metadata as if from an on-disk distribution"""
def __init__(self,*pairs):
self.metadata = dict(pairs)
def has_metadata(self,name):
return name in self.metadata
def get_metadata(self,name):
return self.metadata[name]
def get_metadata_lines(self,name):
return yield_lines(self.get_metadata(name))
class DistroTests(TestCase):
def testCollection(self):
# empty path should produce no distributions
ad = Environment([], platform=None, python=None)
self.assertEqual(list(ad), [])
self.assertEqual(ad['FooPkg'],[])
ad.add(Distribution.from_filename("FooPkg-1.3_1.egg"))
ad.add(Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg"))
ad.add(Distribution.from_filename("FooPkg-1.2-py2.4.egg"))
# Name is in there now
self.failUnless(ad['FooPkg'])
# But only 1 package
self.assertEqual(list(ad), ['foopkg'])
# Distributions sort by version
self.assertEqual(
[dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2']
)
# Removing a distribution leaves sequence alone
ad.remove(ad['FooPkg'][1])
self.assertEqual(
[dist.version for dist in ad['FooPkg']], ['1.4','1.2']
)
# And inserting adds them in order
ad.add(Distribution.from_filename("FooPkg-1.9.egg"))
self.assertEqual(
[dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2']
)
ws = WorkingSet([])
foo12 = Distribution.from_filename("FooPkg-1.2-py2.4.egg")
foo14 = Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg")
req, = parse_requirements("FooPkg>=1.3")
# Nominal case: no distros on path, should yield all applicable
self.assertEqual(ad.best_match(req,ws).version, '1.9')
# If a matching distro is already installed, should return only that
ws.add(foo14); self.assertEqual(ad.best_match(req,ws).version, '1.4')
# If the first matching distro is unsuitable, it's a version conflict
ws = WorkingSet([]); ws.add(foo12); ws.add(foo14)
self.assertRaises(VersionConflict, ad.best_match, req, ws)
# If more than one match on the path, the first one takes precedence
ws = WorkingSet([]); ws.add(foo14); ws.add(foo12); ws.add(foo14);
self.assertEqual(ad.best_match(req,ws).version, '1.4')
def checkFooPkg(self,d):
self.assertEqual(d.project_name, "FooPkg")
self.assertEqual(d.key, "foopkg")
self.assertEqual(d.version, "1.3-1")
self.assertEqual(d.py_version, "2.4")
self.assertEqual(d.platform, "win32")
self.assertEqual(d.parsed_version, parse_version("1.3-1"))
def testDistroBasics(self):
d = Distribution(
"/some/path",
project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
)
self.checkFooPkg(d)
d = Distribution("/some/path")
self.assertEqual(d.py_version, sys.version[:3])
self.assertEqual(d.platform, None)
def testDistroParse(self):
d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg")
self.checkFooPkg(d)
d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg-info")
self.checkFooPkg(d)
def testDistroMetadata(self):
d = Distribution(
"/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
metadata = Metadata(
('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
)
)
self.checkFooPkg(d)
def distRequires(self, txt):
return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
def checkRequires(self, dist, txt, extras=()):
self.assertEqual(
list(dist.requires(extras)),
list(parse_requirements(txt))
)
def testDistroDependsSimple(self):
for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
self.checkRequires(self.distRequires(v), v)
def testResolve(self):
ad = Environment([]); ws = WorkingSet([])
# Resolving no requirements -> nothing to install
self.assertEqual( list(ws.resolve([],ad)), [] )
# Request something not in the collection -> DistributionNotFound
self.assertRaises(
DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad
)
Foo = Distribution.from_filename(
"/foo_dir/Foo-1.2.egg",
metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
)
ad.add(Foo); ad.add(Distribution.from_filename("Foo-0.9.egg"))
# Request thing(s) that are available -> list to activate
for i in range(3):
targets = list(ws.resolve(parse_requirements("Foo"), ad))
self.assertEqual(targets, [Foo])
map(ws.add,targets)
self.assertRaises(VersionConflict, ws.resolve,
parse_requirements("Foo==0.9"), ad)
ws = WorkingSet([]) # reset
# Request an extra that causes an unresolved dependency for "Baz"
self.assertRaises(
DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad
)
Baz = Distribution.from_filename(
"/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
)
ad.add(Baz)
# Activation list now includes resolved dependency
self.assertEqual(
list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz]
)
# Requests for conflicting versions produce VersionConflict
self.assertRaises( VersionConflict,
ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad
)
def testDistroDependsOptions(self):
d = self.distRequires("""
Twisted>=1.5
[docgen]
ZConfig>=2.0
docutils>=0.3
[fastcgi]
fcgiapp>=0.1""")
self.checkRequires(d,"Twisted>=1.5")
self.checkRequires(
d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
)
self.checkRequires(
d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
)
self.checkRequires(
d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
["docgen","fastcgi"]
)
self.checkRequires(
d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
["fastcgi", "docgen"]
)
self.assertRaises(UnknownExtra, d.requires, ["foo"])
class EntryPointTests(TestCase):
def assertfields(self, ep):
self.assertEqual(ep.name,"foo")
self.assertEqual(ep.module_name,"setuptools.tests.test_resources")
self.assertEqual(ep.attrs, ("EntryPointTests",))
self.assertEqual(ep.extras, ("x",))
self.failUnless(ep.load() is EntryPointTests)
self.assertEqual(
str(ep),
"foo = setuptools.tests.test_resources:EntryPointTests [x]"
)
def setUp(self):
self.dist = Distribution.from_filename(
"FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
def testBasics(self):
ep = EntryPoint(
"foo", "setuptools.tests.test_resources", ["EntryPointTests"],
["x"], self.dist
)
self.assertfields(ep)
def testParse(self):
s = "foo = setuptools.tests.test_resources:EntryPointTests [x]"
ep = EntryPoint.parse(s, self.dist)
self.assertfields(ep)
ep = EntryPoint.parse("bar baz= spammity[PING]")
self.assertEqual(ep.name,"bar baz")
self.assertEqual(ep.module_name,"spammity")
self.assertEqual(ep.attrs, ())
self.assertEqual(ep.extras, ("ping",))
ep = EntryPoint.parse(" fizzly = wocka:foo")
self.assertEqual(ep.name,"fizzly")
self.assertEqual(ep.module_name,"wocka")
self.assertEqual(ep.attrs, ("foo",))
self.assertEqual(ep.extras, ())
def testRejects(self):
for ep in [
"foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2",
]:
try: EntryPoint.parse(ep)
except ValueError: pass
else: raise AssertionError("Should've been bad", ep)
def checkSubMap(self, m):
self.assertEqual(str(m),
"{"
"'feature2': EntryPoint.parse("
"'feature2 = another.module:SomeClass [extra1,extra2]'), "
"'feature1': EntryPoint.parse("
"'feature1 = somemodule:somefunction')"
"}"
)
submap_str = """
# define features for blah blah
feature1 = somemodule:somefunction
feature2 = another.module:SomeClass [extra1,extra2]
"""
def testParseList(self):
self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar")
self.assertRaises(ValueError, EntryPoint.parse_group, "x",
["foo=baz", "foo=bar"])
def testParseMap(self):
m = EntryPoint.parse_map({'xyz':self.submap_str})
self.checkSubMap(m['xyz'])
self.assertEqual(m.keys(),['xyz'])
m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
self.checkSubMap(m['xyz'])
self.assertEqual(m.keys(),['xyz'])
self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"])
self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str)
class RequirementsTests(TestCase):
def testBasics(self):
r = Requirement.parse("Twisted>=1.2")
self.assertEqual(str(r),"Twisted>=1.2")
self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')")
self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ()))
self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ()))
self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ()))
self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ()))
self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ()))
self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2"))
def testOrdering(self):
r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ())
r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ())
self.assertEqual(r1,r2)
self.assertEqual(str(r1),str(r2))
self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2")
def testBasicContains(self):
r = Requirement("Twisted", [('>=','1.2')], ())
foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
twist11 = Distribution.from_filename("Twisted-1.1.egg")
twist12 = Distribution.from_filename("Twisted-1.2.egg")
self.failUnless(parse_version('1.2') in r)
self.failUnless(parse_version('1.1') not in r)
self.failUnless('1.2' in r)
self.failUnless('1.1' not in r)
self.failUnless(foo_dist not in r)
self.failUnless(twist11 not in r)
self.failUnless(twist12 in r)
def testAdvancedContains(self):
r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5")
for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'):
self.failUnless(v in r, (v,r))
for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'):
self.failUnless(v not in r, (v,r))
def testOptionsAndHashing(self):
r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0")
self.assertEqual(r1,r2)
self.assertEqual(r1,r3)
self.assertEqual(r1.extras, ("foo","bar"))
self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized
self.assertEqual(hash(r1), hash(r2))
self.assertEqual(
hash(r1), hash(("twisted", ((">=",parse_version("1.2")),),
ImmutableSet(["foo","bar"])))
)
def testVersionEquality(self):
r1 = Requirement.parse("setuptools==0.3a2")
r2 = Requirement.parse("setuptools!=0.3a4")
d = Distribution.from_filename
self.failIf(d("setuptools-0.3a4.egg") in r1)
self.failIf(d("setuptools-0.3a1.egg") in r1)
self.failIf(d("setuptools-0.3a4.egg") in r2)
self.failUnless(d("setuptools-0.3a2.egg") in r1)
self.failUnless(d("setuptools-0.3a2.egg") in r2)
self.failUnless(d("setuptools-0.3a3.egg") in r2)
self.failUnless(d("setuptools-0.3a5.egg") in r2)
class ParseTests(TestCase):
def testEmptyParse(self):
self.assertEqual(list(parse_requirements('')), [])
def testYielding(self):
for inp,out in [
([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
(['x\n\n','y'], ['x','y']),
]:
self.assertEqual(list(pkg_resources.yield_lines(inp)),out)
def testSplitting(self):
self.assertEqual(
list(
pkg_resources.split_sections("""
x
[Y]
z
a
[b ]
# foo
c
[ d]
[q]
v
"""
)
),
[(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])]
)
self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo"))
def testSafeName(self):
self.assertEqual(safe_name("adns-python"), "adns-python")
self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker")
self.assertNotEqual(safe_name("peak.web"), "peak-web")
def testSafeVersion(self):
self.assertEqual(safe_version("1.2-1"), "1.2-1")
self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha")
self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521")
self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker")
self.assertEqual(safe_version("peak.web"), "peak.web")
def testSimpleRequirements(self):
self.assertEqual(
list(parse_requirements('Twis-Ted>=1.2-1')),
[Requirement('Twis-Ted',[('>=','1.2-1')], ())]
)
self.assertEqual(
list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')),
[Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
)
self.assertEqual(
Requirement.parse("FooBar==1.99a3"),
Requirement("FooBar", [('==','1.99a3')], ())
)
self.assertRaises(ValueError,Requirement.parse,">=2.3")
self.assertRaises(ValueError,Requirement.parse,"x\\")
self.assertRaises(ValueError,Requirement.parse,"x==2 q")
self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2")
self.assertRaises(ValueError,Requirement.parse,"#")
def testVersionEquality(self):
def c(s1,s2):
p1, p2 = parse_version(s1),parse_version(s2)
self.assertEqual(p1,p2, (s1,s2,p1,p2))
c('1.2-rc1', '1.2rc1')
c('0.4', '0.4.0')
c('0.4.0.0', '0.4.0')
c('0.4.0-0', '0.4-0')
c('0pl1', '0.0pl1')
c('0pre1', '0.0c1')
c('0.0.0preview1', '0c1')
c('0.0c1', '0-rc1')
c('1.2a1', '1.2.a.1'); c('1.2...a', '1.2a')
def testVersionOrdering(self):
def c(s1,s2):
p1, p2 = parse_version(s1),parse_version(s2)
self.failUnless(p1<p2, (s1,s2,p1,p2))
c('2.1','2.1.1')
c('2a1','2b0')
c('2a1','2.1')
c('2.3a1', '2.3')
c('2.1-1', '2.1-2')
c('2.1-1', '2.1.1')
c('2.1', '2.1pl4')
c('2.1a0-20040501', '2.1')
c('1.1', '02.1')
c('A56','B27')
c('3.2', '3.2.pl0')
c('3.2-1', '3.2pl1')
c('3.2pl1', '3.2pl1-1')
c('0.4', '4.0')
c('0.0.4', '0.4.0')
c('0pl1', '0.4pl1')
c('2.1.0-rc1','2.1.0')
torture ="""
0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
0.77.2-1 0.77.1-1 0.77.0-1
""".split()
for p,v1 in enumerate(torture):
for v2 in torture[p+1:]:
c(v2,v1)

View File

@ -0,0 +1,16 @@
"""Tests for setuptools.
The tests for setuptools are defined in the setuptools.tests package;
this runs them from there.
"""
import test.test_support
from setuptools.command.test import ScanningLoader
def test_main():
test.test_support.run_suite(
ScanningLoader().loadTestsFromName('setuptools.tests')
)
if __name__ == "__main__":
test_main()