2011-05-19 08:07:25 -03:00
|
|
|
"""Utilities to find and read config files used by packaging."""
|
|
|
|
|
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
import logging
|
|
|
|
|
|
|
|
from shlex import split
|
|
|
|
from configparser import RawConfigParser
|
|
|
|
from packaging import logger
|
|
|
|
from packaging.errors import PackagingOptionError
|
|
|
|
from packaging.compiler.extension import Extension
|
2011-06-10 18:26:31 -03:00
|
|
|
from packaging.util import (check_environ, iglob, resolve_name, strtobool,
|
|
|
|
split_multiline)
|
2011-05-19 08:07:25 -03:00
|
|
|
from packaging.compiler import set_compiler
|
|
|
|
from packaging.command import set_command
|
|
|
|
from packaging.markers import interpret
|
|
|
|
|
|
|
|
|
2011-09-01 02:01:13 -03:00
|
|
|
def _check_name(name, packages):
|
|
|
|
if '.' not in name:
|
|
|
|
return
|
|
|
|
parts = name.split('.')
|
|
|
|
parent = '.'.join(parts[:-1])
|
|
|
|
if parent not in packages:
|
|
|
|
# we could log a warning instead of raising, but what's the use
|
|
|
|
# of letting people build modules they can't import?
|
|
|
|
raise PackagingOptionError(
|
|
|
|
'parent package for extension %r not found' % name)
|
|
|
|
|
|
|
|
|
2011-05-19 08:07:25 -03:00
|
|
|
def _pop_values(values_dct, key):
|
|
|
|
"""Remove values from the dictionary and convert them as a list"""
|
|
|
|
vals_str = values_dct.pop(key, '')
|
|
|
|
if not vals_str:
|
|
|
|
return
|
|
|
|
fields = []
|
2011-05-21 07:00:10 -03:00
|
|
|
# the line separator is \n for setup.cfg files
|
|
|
|
for field in vals_str.split('\n'):
|
2011-05-19 08:07:25 -03:00
|
|
|
tmp_vals = field.split('--')
|
|
|
|
if len(tmp_vals) == 2 and not interpret(tmp_vals[1]):
|
|
|
|
continue
|
|
|
|
fields.append(tmp_vals[0])
|
|
|
|
# Get bash options like `gcc -print-file-name=libgcc.a` XXX bash options?
|
|
|
|
vals = split(' '.join(fields))
|
|
|
|
if vals:
|
|
|
|
return vals
|
|
|
|
|
|
|
|
|
|
|
|
def _rel_path(base, path):
|
2011-05-21 06:48:16 -03:00
|
|
|
# normalizes and returns a lstripped-/-separated path
|
|
|
|
base = base.replace(os.path.sep, '/')
|
|
|
|
path = path.replace(os.path.sep, '/')
|
2011-05-19 08:07:25 -03:00
|
|
|
assert path.startswith(base)
|
|
|
|
return path[len(base):].lstrip('/')
|
|
|
|
|
|
|
|
|
|
|
|
def get_resources_dests(resources_root, rules):
    """Compute the destination of each resource file matched by *rules*.

    Each rule is a (base, suffix, dest) triple whose globs are expanded
    under *resources_root*; a dest of None removes previously matched
    files from the mapping.  Returns a dict mapping each source path
    (relative to *resources_root*) to its destination path.
    """
    mapping = {}
    for base, suffix, dest in rules:
        for matched_base in iglob(os.path.join(resources_root, base)):
            for source in iglob(os.path.join(matched_base, suffix)):
                key = _rel_path(resources_root, source)
                if dest is None:
                    # exclusion rule: drop the file if a prior rule added it
                    mapping.pop(key, None)
                    continue
                tail = _rel_path(matched_base, source)
                target_dir = dest.replace(os.path.sep, '/').rstrip('/')
                mapping[key] = target_dir + '/' + tail
    return mapping
|
|
|
|
|
|
|
|
|
|
|
|
class Config:
    """Class used to work with configuration files"""

    def __init__(self, dist):
        # distribution object whose options this config populates
        self.dist = dist
        # setup hooks collected from the [global] setup_hooks option
        self.setup_hooks = []
|
2011-05-19 08:07:25 -03:00
|
|
|
|
2011-06-10 19:33:38 -03:00
|
|
|
def run_hooks(self, config):
|
|
|
|
"""Run setup hooks in the order defined in the spec."""
|
|
|
|
for hook in self.setup_hooks:
|
|
|
|
hook(config)
|
2011-05-19 08:07:25 -03:00
|
|
|
|
|
|
|
def find_config_files(self):
|
|
|
|
"""Find as many configuration files as should be processed for this
|
|
|
|
platform, and return a list of filenames in the order in which they
|
|
|
|
should be parsed. The filenames returned are guaranteed to exist
|
|
|
|
(modulo nasty race conditions).
|
|
|
|
|
|
|
|
There are three possible config files: packaging.cfg in the
|
|
|
|
Packaging installation directory (ie. where the top-level
|
|
|
|
Packaging __inst__.py file lives), a file in the user's home
|
|
|
|
directory named .pydistutils.cfg on Unix and pydistutils.cfg
|
|
|
|
on Windows/Mac; and setup.cfg in the current directory.
|
|
|
|
|
|
|
|
The file in the user's home directory can be disabled with the
|
|
|
|
--no-user-cfg option.
|
|
|
|
"""
|
|
|
|
files = []
|
|
|
|
check_environ()
|
|
|
|
|
|
|
|
# Where to look for the system-wide Packaging config file
|
|
|
|
sys_dir = os.path.dirname(sys.modules['packaging'].__file__)
|
|
|
|
|
|
|
|
# Look for the system config file
|
|
|
|
sys_file = os.path.join(sys_dir, "packaging.cfg")
|
|
|
|
if os.path.isfile(sys_file):
|
|
|
|
files.append(sys_file)
|
|
|
|
|
|
|
|
# What to call the per-user config file
|
|
|
|
if os.name == 'posix':
|
|
|
|
user_filename = ".pydistutils.cfg"
|
|
|
|
else:
|
|
|
|
user_filename = "pydistutils.cfg"
|
|
|
|
|
|
|
|
# And look for the user config file
|
|
|
|
if self.dist.want_user_cfg:
|
|
|
|
user_file = os.path.join(os.path.expanduser('~'), user_filename)
|
|
|
|
if os.path.isfile(user_file):
|
|
|
|
files.append(user_file)
|
|
|
|
|
|
|
|
# All platforms support local setup.cfg
|
|
|
|
local_file = "setup.cfg"
|
|
|
|
if os.path.isfile(local_file):
|
|
|
|
files.append(local_file)
|
|
|
|
|
|
|
|
if logger.isEnabledFor(logging.DEBUG):
|
|
|
|
logger.debug("using config files: %s", ', '.join(files))
|
|
|
|
return files
|
|
|
|
|
|
|
|
def _convert_metadata(self, name, value):
|
|
|
|
# converts a value found in setup.cfg into a valid metadata
|
|
|
|
# XXX
|
|
|
|
return value
|
|
|
|
|
|
|
|
def _read_setup_cfg(self, parser, cfg_filename):
|
|
|
|
cfg_directory = os.path.dirname(os.path.abspath(cfg_filename))
|
|
|
|
content = {}
|
|
|
|
for section in parser.sections():
|
|
|
|
content[section] = dict(parser.items(section))
|
|
|
|
|
2011-06-10 19:33:38 -03:00
|
|
|
# global setup hooks are called first
|
2011-05-19 08:07:25 -03:00
|
|
|
if 'global' in content:
|
2011-06-10 19:33:38 -03:00
|
|
|
if 'setup_hooks' in content['global']:
|
|
|
|
setup_hooks = split_multiline(content['global']['setup_hooks'])
|
|
|
|
|
2011-06-19 16:23:43 -03:00
|
|
|
# add project directory to sys.path, to allow hooks to be
|
|
|
|
# distributed with the project
|
|
|
|
sys.path.insert(0, cfg_directory)
|
|
|
|
try:
|
|
|
|
for line in setup_hooks:
|
|
|
|
try:
|
|
|
|
hook = resolve_name(line)
|
|
|
|
except ImportError as e:
|
2011-09-01 02:01:13 -03:00
|
|
|
logger.warning('cannot find setup hook: %s',
|
|
|
|
e.args[0])
|
2011-06-19 16:23:43 -03:00
|
|
|
else:
|
|
|
|
self.setup_hooks.append(hook)
|
|
|
|
self.run_hooks(content)
|
|
|
|
finally:
|
|
|
|
sys.path.pop(0)
|
2011-05-19 08:07:25 -03:00
|
|
|
|
|
|
|
metadata = self.dist.metadata
|
|
|
|
|
|
|
|
# setting the metadata values
|
|
|
|
if 'metadata' in content:
|
|
|
|
for key, value in content['metadata'].items():
|
|
|
|
key = key.replace('_', '-')
|
|
|
|
if metadata.is_multi_field(key):
|
2011-06-10 18:26:31 -03:00
|
|
|
value = split_multiline(value)
|
2011-05-19 08:07:25 -03:00
|
|
|
|
|
|
|
if key == 'project-url':
|
|
|
|
value = [(label.strip(), url.strip())
|
|
|
|
for label, url in
|
|
|
|
[v.split(',') for v in value]]
|
|
|
|
|
|
|
|
if key == 'description-file':
|
|
|
|
if 'description' in content['metadata']:
|
|
|
|
msg = ("description and description-file' are "
|
|
|
|
"mutually exclusive")
|
|
|
|
raise PackagingOptionError(msg)
|
|
|
|
|
2011-06-10 19:21:18 -03:00
|
|
|
filenames = value.split()
|
2011-05-19 08:07:25 -03:00
|
|
|
|
2011-06-10 19:21:18 -03:00
|
|
|
# concatenate all files
|
|
|
|
value = []
|
2011-05-19 08:07:25 -03:00
|
|
|
for filename in filenames:
|
|
|
|
# will raise if file not found
|
|
|
|
with open(filename) as description_file:
|
2011-06-10 19:21:18 -03:00
|
|
|
value.append(description_file.read().strip())
|
2011-05-19 08:07:25 -03:00
|
|
|
# add filename as a required file
|
|
|
|
if filename not in metadata.requires_files:
|
|
|
|
metadata.requires_files.append(filename)
|
2011-06-10 19:21:18 -03:00
|
|
|
value = '\n'.join(value).strip()
|
2011-05-19 08:07:25 -03:00
|
|
|
key = 'description'
|
|
|
|
|
|
|
|
if metadata.is_metadata_field(key):
|
|
|
|
metadata[key] = self._convert_metadata(key, value)
|
|
|
|
|
|
|
|
if 'files' in content:
|
|
|
|
files = content['files']
|
|
|
|
self.dist.package_dir = files.pop('packages_root', None)
|
|
|
|
|
2011-06-10 18:26:31 -03:00
|
|
|
files = dict((key, split_multiline(value)) for key, value in
|
2011-05-19 08:07:25 -03:00
|
|
|
files.items())
|
|
|
|
|
|
|
|
self.dist.packages = []
|
|
|
|
|
|
|
|
packages = files.get('packages', [])
|
|
|
|
if isinstance(packages, str):
|
|
|
|
packages = [packages]
|
|
|
|
|
|
|
|
for package in packages:
|
|
|
|
if ':' in package:
|
|
|
|
dir_, package = package.split(':')
|
|
|
|
self.dist.package_dir[package] = dir_
|
|
|
|
self.dist.packages.append(package)
|
|
|
|
|
|
|
|
self.dist.py_modules = files.get('modules', [])
|
|
|
|
if isinstance(self.dist.py_modules, str):
|
|
|
|
self.dist.py_modules = [self.dist.py_modules]
|
|
|
|
self.dist.scripts = files.get('scripts', [])
|
|
|
|
if isinstance(self.dist.scripts, str):
|
|
|
|
self.dist.scripts = [self.dist.scripts]
|
|
|
|
|
|
|
|
self.dist.package_data = {}
|
2012-02-04 16:53:07 -04:00
|
|
|
# bookkeeping for the loop below
|
|
|
|
firstline = True
|
|
|
|
prev = None
|
|
|
|
|
2011-09-10 13:22:04 -03:00
|
|
|
for line in files.get('package_data', []):
|
2012-02-04 16:53:07 -04:00
|
|
|
if '=' in line:
|
|
|
|
# package name -- file globs or specs
|
|
|
|
key, value = line.split('=')
|
|
|
|
prev = self.dist.package_data[key.strip()] = value.split()
|
|
|
|
elif firstline:
|
|
|
|
# invalid continuation on the first line
|
|
|
|
raise PackagingOptionError(
|
|
|
|
'malformed package_data first line: %r (misses "=")' %
|
|
|
|
line)
|
|
|
|
else:
|
|
|
|
# continuation, add to last seen package name
|
|
|
|
prev.extend(line.split())
|
|
|
|
|
|
|
|
firstline = False
|
2011-05-19 08:07:25 -03:00
|
|
|
|
|
|
|
self.dist.data_files = []
|
|
|
|
for data in files.get('data_files', []):
|
|
|
|
data = data.split('=')
|
|
|
|
if len(data) != 2:
|
|
|
|
continue
|
|
|
|
key, value = data
|
|
|
|
values = [v.strip() for v in value.split(',')]
|
|
|
|
self.dist.data_files.append((key, values))
|
|
|
|
|
|
|
|
# manifest template
|
|
|
|
self.dist.extra_files = files.get('extra_files', [])
|
|
|
|
|
|
|
|
resources = []
|
|
|
|
for rule in files.get('resources', []):
|
|
|
|
glob, destination = rule.split('=', 1)
|
|
|
|
rich_glob = glob.strip().split(' ', 1)
|
|
|
|
if len(rich_glob) == 2:
|
|
|
|
prefix, suffix = rich_glob
|
|
|
|
else:
|
|
|
|
assert len(rich_glob) == 1
|
|
|
|
prefix = ''
|
|
|
|
suffix = glob
|
|
|
|
if destination == '<exclude>':
|
|
|
|
destination = None
|
|
|
|
resources.append(
|
|
|
|
(prefix.strip(), suffix.strip(), destination.strip()))
|
|
|
|
self.dist.data_files = get_resources_dests(
|
|
|
|
cfg_directory, resources)
|
|
|
|
|
|
|
|
ext_modules = self.dist.ext_modules
|
|
|
|
for section_key in content:
|
2011-09-01 01:29:11 -03:00
|
|
|
# no str.partition in 2.4 :(
|
|
|
|
labels = section_key.split(':')
|
2011-05-19 08:07:25 -03:00
|
|
|
if len(labels) == 2 and labels[0] == 'extension':
|
|
|
|
values_dct = content[section_key]
|
2011-09-01 01:29:11 -03:00
|
|
|
if 'name' in values_dct:
|
|
|
|
raise PackagingOptionError(
|
|
|
|
'extension name should be given as [extension: name], '
|
|
|
|
'not as key')
|
2011-09-01 02:01:13 -03:00
|
|
|
name = labels[1].strip()
|
|
|
|
_check_name(name, self.dist.packages)
|
2011-05-19 08:07:25 -03:00
|
|
|
ext_modules.append(Extension(
|
2011-09-01 02:01:13 -03:00
|
|
|
name,
|
2011-05-19 08:07:25 -03:00
|
|
|
_pop_values(values_dct, 'sources'),
|
|
|
|
_pop_values(values_dct, 'include_dirs'),
|
|
|
|
_pop_values(values_dct, 'define_macros'),
|
|
|
|
_pop_values(values_dct, 'undef_macros'),
|
|
|
|
_pop_values(values_dct, 'library_dirs'),
|
|
|
|
_pop_values(values_dct, 'libraries'),
|
|
|
|
_pop_values(values_dct, 'runtime_library_dirs'),
|
|
|
|
_pop_values(values_dct, 'extra_objects'),
|
|
|
|
_pop_values(values_dct, 'extra_compile_args'),
|
|
|
|
_pop_values(values_dct, 'extra_link_args'),
|
|
|
|
_pop_values(values_dct, 'export_symbols'),
|
|
|
|
_pop_values(values_dct, 'swig_opts'),
|
|
|
|
_pop_values(values_dct, 'depends'),
|
|
|
|
values_dct.pop('language', None),
|
|
|
|
values_dct.pop('optional', None),
|
|
|
|
**values_dct))
|
|
|
|
|
|
|
|
def parse_config_files(self, filenames=None):
|
|
|
|
if filenames is None:
|
|
|
|
filenames = self.find_config_files()
|
|
|
|
|
|
|
|
logger.debug("Distribution.parse_config_files():")
|
|
|
|
|
|
|
|
parser = RawConfigParser()
|
|
|
|
|
|
|
|
for filename in filenames:
|
|
|
|
logger.debug(" reading %s", filename)
|
2011-05-19 13:45:32 -03:00
|
|
|
parser.read(filename, encoding='utf-8')
|
2011-05-19 08:07:25 -03:00
|
|
|
|
|
|
|
if os.path.split(filename)[-1] == 'setup.cfg':
|
|
|
|
self._read_setup_cfg(parser, filename)
|
|
|
|
|
|
|
|
for section in parser.sections():
|
|
|
|
if section == 'global':
|
|
|
|
if parser.has_option('global', 'compilers'):
|
|
|
|
self._load_compilers(parser.get('global', 'compilers'))
|
|
|
|
|
|
|
|
if parser.has_option('global', 'commands'):
|
|
|
|
self._load_commands(parser.get('global', 'commands'))
|
|
|
|
|
|
|
|
options = parser.options(section)
|
|
|
|
opt_dict = self.dist.get_option_dict(section)
|
|
|
|
|
|
|
|
for opt in options:
|
|
|
|
if opt == '__name__':
|
|
|
|
continue
|
|
|
|
val = parser.get(section, opt)
|
|
|
|
opt = opt.replace('-', '_')
|
|
|
|
|
|
|
|
if opt == 'sub_commands':
|
2011-06-10 18:26:31 -03:00
|
|
|
val = split_multiline(val)
|
2011-05-19 08:07:25 -03:00
|
|
|
if isinstance(val, str):
|
|
|
|
val = [val]
|
|
|
|
|
|
|
|
# Hooks use a suffix system to prevent being overriden
|
|
|
|
# by a config file processed later (i.e. a hook set in
|
|
|
|
# the user config file cannot be replaced by a hook
|
|
|
|
# set in a project config file, unless they have the
|
|
|
|
# same suffix).
|
|
|
|
if (opt.startswith("pre_hook.") or
|
|
|
|
opt.startswith("post_hook.")):
|
|
|
|
hook_type, alias = opt.split(".")
|
|
|
|
hook_dict = opt_dict.setdefault(
|
|
|
|
hook_type, (filename, {}))[1]
|
|
|
|
hook_dict[alias] = val
|
|
|
|
else:
|
|
|
|
opt_dict[opt] = filename, val
|
|
|
|
|
|
|
|
# Make the RawConfigParser forget everything (so we retain
|
|
|
|
# the original filenames that options come from)
|
|
|
|
parser.__init__()
|
|
|
|
|
|
|
|
# If there was a "global" section in the config file, use it
|
|
|
|
# to set Distribution options.
|
|
|
|
if 'global' in self.dist.command_options:
|
|
|
|
for opt, (src, val) in self.dist.command_options['global'].items():
|
|
|
|
alias = self.dist.negative_opt.get(opt)
|
|
|
|
try:
|
|
|
|
if alias:
|
|
|
|
setattr(self.dist, alias, not strtobool(val))
|
|
|
|
elif opt == 'dry_run': # FIXME ugh!
|
|
|
|
setattr(self.dist, opt, strtobool(val))
|
|
|
|
else:
|
|
|
|
setattr(self.dist, opt, val)
|
|
|
|
except ValueError as msg:
|
|
|
|
raise PackagingOptionError(msg)
|
|
|
|
|
|
|
|
def _load_compilers(self, compilers):
|
2011-06-10 18:26:31 -03:00
|
|
|
compilers = split_multiline(compilers)
|
2011-05-19 08:07:25 -03:00
|
|
|
if isinstance(compilers, str):
|
|
|
|
compilers = [compilers]
|
|
|
|
for compiler in compilers:
|
|
|
|
set_compiler(compiler.strip())
|
|
|
|
|
|
|
|
def _load_commands(self, commands):
|
2011-06-10 18:26:31 -03:00
|
|
|
commands = split_multiline(commands)
|
2011-05-19 08:07:25 -03:00
|
|
|
if isinstance(commands, str):
|
|
|
|
commands = [commands]
|
|
|
|
for command in commands:
|
|
|
|
set_command(command.strip())
|