merge heads

This commit is contained in:
Benjamin Peterson 2011-05-30 11:15:05 -05:00
commit c6c5e04611
13 changed files with 277 additions and 277 deletions

View File

@ -18,7 +18,7 @@ __all__ = [
'get_distributions', 'get_distribution', 'get_file_users',
'provides_distribution', 'obsoletes_distribution',
'enable_cache', 'disable_cache', 'clear_cache',
]
'get_file_path', 'get_file']
# TODO update docs
@ -627,3 +627,17 @@ def get_file_users(path):
for dist in get_distributions():
if dist.uses(path):
yield dist
def get_file_path(distribution_name, relative_path):
"""Return the path to a resource file."""
dist = get_distribution(distribution_name)
if dist is not None:
return dist.get_resource_path(relative_path)
raise LookupError('no distribution named %r found' % distribution_name)
def get_file(distribution_name, relative_path, *args, **kwargs):
"""Open and return a resource file."""
return open(get_file_path(distribution_name, relative_path),
*args, **kwargs)
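
The two helpers above move from packaging.resources into packaging.database. A minimal usage sketch, assuming a hypothetical installed distribution named 'example-dist' that registered 'conf/example.cfg' in its RESOURCES file:

# Hedged sketch: 'example-dist' and 'conf/example.cfg' are invented names,
# used only to show how the relocated helpers are called.
from packaging.database import get_file, get_file_path

cfg_path = get_file_path('example-dist', 'conf/example.cfg')  # absolute path on disk
with get_file('example-dist', 'conf/example.cfg') as fp:      # thin wrapper around open()
    print(fp.read())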

View File

@ -118,15 +118,15 @@ def install_local_project(path):
"""
path = os.path.abspath(path)
if os.path.isdir(path):
logger.info('installing from source directory: %s', path)
logger.info('Installing from source directory: %s', path)
_run_install_from_dir(path)
elif _is_archive_file(path):
logger.info('installing from archive: %s', path)
logger.info('Installing from archive: %s', path)
_unpacked_dir = tempfile.mkdtemp()
shutil.unpack_archive(path, _unpacked_dir)
_run_install_from_archive(_unpacked_dir)
else:
logger.warning('no projects to install')
logger.warning('No projects to install.')
def _run_install_from_archive(source_dir):
@ -174,16 +174,16 @@ def install_dists(dists, path, paths=sys.path):
installed_dists = []
for dist in dists:
logger.info('installing %s %s', dist.name, dist.version)
logger.info('Installing %r %s...', dist.name, dist.version)
try:
_install_dist(dist, path)
installed_dists.append(dist)
except Exception as e:
logger.info('failed: %s', e)
logger.info('Failed: %s', e)
# reverting
for installed_dist in installed_dists:
logger.info('reverting %s', installed_dist)
logger.info('Reverting %s', installed_dist)
_remove_dist(installed_dist, paths)
raise e
return installed_dists
@ -292,7 +292,7 @@ def get_infos(requirements, index=None, installed=None, prefer_final=True):
# or remove
if not installed:
logger.info('reading installed distributions')
logger.debug('Reading installed distributions')
installed = list(get_distributions(use_egg_info=True))
infos = {'install': [], 'remove': [], 'conflict': []}
@ -306,7 +306,7 @@ def get_infos(requirements, index=None, installed=None, prefer_final=True):
if predicate.name.lower() != installed_project.name.lower():
continue
found = True
logger.info('found %s %s', installed_project.name,
logger.info('Found %s %s', installed_project.name,
installed_project.metadata['version'])
# if we already have something installed, check it matches the
@ -316,7 +316,7 @@ def get_infos(requirements, index=None, installed=None, prefer_final=True):
break
if not found:
logger.info('project not installed')
logger.debug('Project not installed')
if not index:
index = wrapper.ClientWrapper()
@ -331,7 +331,7 @@ def get_infos(requirements, index=None, installed=None, prefer_final=True):
raise InstallationException('Release not found: "%s"' % requirements)
if release is None:
logger.info('could not find a matching project')
logger.info('Could not find a matching project')
return infos
metadata = release.fetch_metadata()
@ -348,7 +348,7 @@ def get_infos(requirements, index=None, installed=None, prefer_final=True):
# Get what the missing deps are
dists = depgraph.missing[release]
if dists:
logger.info("missing dependencies found, retrieving metadata")
logger.info("Missing dependencies found, retrieving metadata")
# we have missing deps
for dist in dists:
_update_infos(infos, get_infos(dist, index, installed))
@ -401,7 +401,7 @@ def remove(project_name, paths=sys.path, auto_confirm=True):
finally:
shutil.rmtree(tmp)
logger.info('removing %r: ', project_name)
logger.info('Removing %r: ', project_name)
for file_ in rmfiles:
logger.info(' %s', file_)
@ -444,20 +444,20 @@ def remove(project_name, paths=sys.path, auto_confirm=True):
if os.path.exists(dist.path):
shutil.rmtree(dist.path)
logger.info('success: removed %d files and %d dirs',
logger.info('Success: removed %d files and %d dirs',
file_count, dir_count)
def install(project):
logger.info('getting information about %r', project)
logger.info('Getting information about %r...', project)
try:
info = get_infos(project)
except InstallationException:
logger.info('cound not find %r', project)
logger.info('Could not find %r', project)
return
if info['install'] == []:
logger.info('nothing to install')
logger.info('Nothing to install')
return
install_path = get_config_var('base')
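
For context, the module touched above exposes two entry points that these hunks adjust: install_local_project() for a source directory or archive already on disk, and install() for a project resolved through the index. A hedged sketch with placeholder arguments:

# Hedged sketch: the archive path and project name are placeholders.
from packaging.install import install, install_local_project

install_local_project('/tmp/example-project-0.1.tar.gz')  # local directory or archive
install('example-project')                                 # fetched and resolved via PyPI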

View File

@ -396,22 +396,24 @@ class Metadata:
value = []
if logger.isEnabledFor(logging.WARNING):
project_name = self['Name']
if name in _PREDICATE_FIELDS and value is not None:
for v in value:
# check that the values are valid predicates
if not is_valid_predicate(v.split(';')[0]):
logger.warning(
'%r is not a valid predicate (field %r)',
v, name)
'%r: %r is not a valid predicate (field %r)',
project_name, v, name)
# FIXME this rejects UNKNOWN, is that right?
elif name in _VERSIONS_FIELDS and value is not None:
if not is_valid_versions(value):
logger.warning('%r is not a valid version (field %r)',
value, name)
logger.warning('%r: %r is not a valid version (field %r)',
project_name, value, name)
elif name in _VERSION_FIELDS and value is not None:
if not is_valid_version(value):
logger.warning('%r is not a valid version (field %r)',
value, name)
logger.warning('%r: %r is not a valid version (field %r)',
project_name, value, name)
if name in _UNICODEFIELDS:
if name == 'Description':

View File

@ -118,9 +118,10 @@ class Crawler(BaseClient):
def __init__(self, index_url=DEFAULT_SIMPLE_INDEX_URL, prefer_final=False,
prefer_source=True, hosts=DEFAULT_HOSTS,
follow_externals=False, mirrors_url=None, mirrors=None,
timeout=SOCKET_TIMEOUT, mirrors_max_tries=0):
timeout=SOCKET_TIMEOUT, mirrors_max_tries=0, verbose=False):
super(Crawler, self).__init__(prefer_final, prefer_source)
self.follow_externals = follow_externals
self.verbose = verbose
# mirroring attributes.
parsed = urllib.parse.urlparse(index_url)
@ -184,7 +185,7 @@ class Crawler(BaseClient):
if predicate.name.lower() in self._projects and not force_update:
return self._projects.get(predicate.name.lower())
prefer_final = self._get_prefer_final(prefer_final)
logger.info('reading info on PyPI about %s', predicate.name)
logger.debug('Reading info on PyPI about %s', predicate.name)
self._process_index_page(predicate.name)
if predicate.name.lower() not in self._projects:
@ -321,8 +322,9 @@ class Crawler(BaseClient):
infos = get_infos_from_url(link, project_name,
is_external=not self.index_url in url)
except CantParseArchiveName as e:
logger.warning(
"version has not been parsed: %s", e)
if self.verbose:
logger.warning(
"version has not been parsed: %s", e)
else:
self._register_release(release_info=infos)
else:

View File

@ -1,25 +0,0 @@
"""Data file path abstraction.
Functions in this module use sysconfig to find the paths to the resource
files registered in project's setup.cfg file. See the documentation for
more information.
"""
# TODO write that documentation
from packaging.database import get_distribution
__all__ = ['get_file_path', 'get_file']
def get_file_path(distribution_name, relative_path):
"""Return the path to a resource file."""
dist = get_distribution(distribution_name)
if dist != None:
return dist.get_resource_path(relative_path)
raise LookupError('no distribution named %r found' % distribution_name)
def get_file(distribution_name, relative_path, *args, **kwargs):
"""Open and return a resource file."""
return open(get_file_path(distribution_name, relative_path),
*args, **kwargs)

View File

@ -5,6 +5,7 @@ import re
import sys
import getopt
import logging
from copy import copy
from packaging import logger
from packaging.dist import Distribution
@ -227,12 +228,13 @@ def _install(dispatcher, args, **kw):
logger.warning('no project to install')
return
target = args[1]
# installing from a source dir or archive file?
if os.path.isdir(args[1]) or _is_archive_file(args[1]):
install_local_project(args[1])
if os.path.isdir(target) or _is_archive_file(target):
install_local_project(target)
else:
# download from PyPI
install(args[1])
install(target)
@action_help(metadata_usage)
@ -399,6 +401,17 @@ class Dispatcher:
msg = 'Unrecognized action "%s"' % self.action
raise PackagingArgError(msg)
self._set_logger()
# for display options we return immediately
option_order = self.parser.get_option_order()
self.args = args
if self.help or self.action is None:
self._show_help(self.parser, display_options_=False)
def _set_logger(self):
# setting up the logging level from the command-line options
# -q gets warning, error and critical
if self.verbose == 0:
@ -416,13 +429,11 @@ class Dispatcher:
else: # -vv and more for debug
level = logging.DEBUG
# for display options we return immediately
option_order = self.parser.get_option_order()
self.args = args
if self.help or self.action is None:
self._show_help(self.parser, display_options_=False)
# setting up the stream handler
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(level)
logger.addHandler(handler)
logger.setLevel(level)
def _parse_command_opts(self, parser, args):
# Pull the current command from the head of the command line
@ -635,11 +646,17 @@ class Dispatcher:
def main(args=None):
dispatcher = Dispatcher(args)
if dispatcher.action is None:
return
old_level = logger.level
old_handlers = copy(logger.handlers)
try:
dispatcher = Dispatcher(args)
if dispatcher.action is None:
return
return dispatcher()
finally:
logger.setLevel(old_level)
logger.handlers[:] = old_handlers
return dispatcher()
if __name__ == '__main__':
sys.exit(main())
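
main() now snapshots the module logger's level and handler list and restores both after the dispatcher runs, so repeated invocations (for example from the test suite) do not pile up handlers or leak a changed level. The same save/restore pattern in isolation, on a throwaway logger:

# Hedged sketch of the pattern used in main() above, demonstrated with the
# standard logging module rather than packaging's own logger.
import logging
from copy import copy

logger = logging.getLogger('demo')

old_level = logger.level
old_handlers = copy(logger.handlers)
try:
    logger.addHandler(logging.StreamHandler())
    logger.setLevel(logging.DEBUG)
    logger.debug('work done under a temporary logging configuration')
finally:
    logger.setLevel(old_level)
    logger.handlers[:] = old_handlers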

View File

@ -150,8 +150,7 @@ class TestTest(TempdirManager,
cmd.tests_require = [phony_project]
cmd.ensure_finalized()
logs = self.get_logs(logging.WARNING)
self.assertEqual(1, len(logs))
self.assertIn(phony_project, logs[0])
self.assertIn(phony_project, logs[-1])
def prepare_a_module(self):
tmp_dir = self.mkdtemp()

View File

@ -1,23 +1,25 @@
import os
import io
import csv
import imp
import sys
import shutil
import zipfile
import tempfile
from os.path import relpath # separate import for backport concerns
from hashlib import md5
from textwrap import dedent
from packaging.errors import PackagingError
from packaging.metadata import Metadata
from packaging.tests import unittest, run_unittest, support, TESTFN
from packaging.tests.test_util import GlobTestCaseBase
from packaging.tests.support import requires_zlib
from packaging.config import get_resources_dests
from packaging.errors import PackagingError
from packaging.metadata import Metadata
from packaging.tests import unittest, support
from packaging.database import (
Distribution, EggInfoDistribution, get_distribution, get_distributions,
provides_distribution, obsoletes_distribution, get_file_users,
enable_cache, disable_cache, distinfo_dirname, _yield_distributions)
enable_cache, disable_cache, distinfo_dirname, _yield_distributions,
get_file, get_file_path)
# TODO Add a test for getting a distribution provided by another distribution
# TODO Add a test for absolute pathed RECORD items (e.g. /etc/myapp/config.ini)
@ -504,12 +506,161 @@ class TestDatabase(support.LoggingCatcher,
checkLists(dists + eggs, found)
class DataFilesTestCase(GlobTestCaseBase):
def assertRulesMatch(self, rules, spec):
tempdir = self.build_files_tree(spec)
expected = self.clean_tree(spec)
result = get_resources_dests(tempdir, rules)
self.assertEqual(expected, result)
def clean_tree(self, spec):
files = {}
for path, value in spec.items():
if value is not None:
files[path] = value
return files
def test_simple_glob(self):
rules = [('', '*.tpl', '{data}')]
spec = {'coucou.tpl': '{data}/coucou.tpl',
'Donotwant': None}
self.assertRulesMatch(rules, spec)
def test_multiple_match(self):
rules = [('scripts', '*.bin', '{appdata}'),
('scripts', '*', '{appscript}')]
spec = {'scripts/script.bin': '{appscript}/script.bin',
'Babarlikestrawberry': None}
self.assertRulesMatch(rules, spec)
def test_set_match(self):
rules = [('scripts', '*.{bin,sh}', '{appscript}')]
spec = {'scripts/script.bin': '{appscript}/script.bin',
'scripts/babar.sh': '{appscript}/babar.sh',
'Babarlikestrawberry': None}
self.assertRulesMatch(rules, spec)
def test_set_match_multiple(self):
rules = [('scripts', 'script{s,}.{bin,sh}', '{appscript}')]
spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
'scripts/script.sh': '{appscript}/script.sh',
'Babarlikestrawberry': None}
self.assertRulesMatch(rules, spec)
def test_set_match_exclude(self):
rules = [('scripts', '*', '{appscript}'),
('', os.path.join('**', '*.sh'), None)]
spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
'scripts/script.sh': None,
'Babarlikestrawberry': None}
self.assertRulesMatch(rules, spec)
def test_glob_in_base(self):
rules = [('scrip*', '*.bin', '{appscript}')]
spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
'scripouille/babar.bin': '{appscript}/babar.bin',
'scriptortu/lotus.bin': '{appscript}/lotus.bin',
'Babarlikestrawberry': None}
self.assertRulesMatch(rules, spec)
def test_recursive_glob(self):
rules = [('', os.path.join('**', '*.bin'), '{binary}')]
spec = {'binary0.bin': '{binary}/binary0.bin',
'scripts/binary1.bin': '{binary}/scripts/binary1.bin',
'scripts/bin/binary2.bin': '{binary}/scripts/bin/binary2.bin',
'you/kill/pandabear.guy': None}
self.assertRulesMatch(rules, spec)
def test_final_exemple_glob(self):
rules = [
('mailman/database/schemas/', '*', '{appdata}/schemas'),
('', os.path.join('**', '*.tpl'), '{appdata}/templates'),
('', os.path.join('developer-docs', '**', '*.txt'), '{doc}'),
('', 'README', '{doc}'),
('mailman/etc/', '*', '{config}'),
('mailman/foo/', os.path.join('**', 'bar', '*.cfg'),
'{config}/baz'),
('mailman/foo/', os.path.join('**', '*.cfg'), '{config}/hmm'),
('', 'some-new-semantic.sns', '{funky-crazy-category}'),
]
spec = {
'README': '{doc}/README',
'some.tpl': '{appdata}/templates/some.tpl',
'some-new-semantic.sns':
'{funky-crazy-category}/some-new-semantic.sns',
'mailman/database/mailman.db': None,
'mailman/database/schemas/blah.schema':
'{appdata}/schemas/blah.schema',
'mailman/etc/my.cnf': '{config}/my.cnf',
'mailman/foo/some/path/bar/my.cfg':
'{config}/hmm/some/path/bar/my.cfg',
'mailman/foo/some/path/other.cfg':
'{config}/hmm/some/path/other.cfg',
'developer-docs/index.txt': '{doc}/developer-docs/index.txt',
'developer-docs/api/toc.txt': '{doc}/developer-docs/api/toc.txt',
}
self.maxDiff = None
self.assertRulesMatch(rules, spec)
def test_get_file(self):
# Create a fake dist
temp_site_packages = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, temp_site_packages)
dist_name = 'test'
dist_info = os.path.join(temp_site_packages, 'test-0.1.dist-info')
os.mkdir(dist_info)
metadata_path = os.path.join(dist_info, 'METADATA')
resources_path = os.path.join(dist_info, 'RESOURCES')
with open(metadata_path, 'w') as fp:
fp.write(dedent("""\
Metadata-Version: 1.2
Name: test
Version: 0.1
Summary: test
Author: me
"""))
test_path = 'test.cfg'
fd, test_resource_path = tempfile.mkstemp()
os.close(fd)
self.addCleanup(os.remove, test_resource_path)
with open(test_resource_path, 'w') as fp:
fp.write('Config')
with open(resources_path, 'w') as fp:
fp.write('%s,%s' % (test_path, test_resource_path))
# Add fake site-packages to sys.path to retrieve fake dist
self.addCleanup(sys.path.remove, temp_site_packages)
sys.path.insert(0, temp_site_packages)
# Force packaging.database to rescan the sys.path
self.addCleanup(enable_cache)
disable_cache()
# Try to retrieve resources paths and files
self.assertEqual(get_file_path(dist_name, test_path),
test_resource_path)
self.assertRaises(KeyError, get_file_path, dist_name, 'i-dont-exist')
with get_file(dist_name, test_path) as fp:
self.assertEqual(fp.read(), 'Config')
self.assertRaises(KeyError, get_file, dist_name, 'i-dont-exist')
def test_suite():
suite = unittest.TestSuite()
load = unittest.defaultTestLoader.loadTestsFromTestCase
suite.addTest(load(TestDistribution))
suite.addTest(load(TestEggInfoDistribution))
suite.addTest(load(TestDatabase))
suite.addTest(load(DataFilesTestCase))
return suite
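
For reference, the RESOURCES file that test_get_file writes above is one CSV line per resource, mapping the logical path to the actual file on disk. A tiny sketch of reading that mapping back (the temp path is a placeholder):

# Hedged sketch: '/tmp/tmpXXXXXX' stands in for the tempfile created in the test.
import csv
import io

record = io.StringIO('test.cfg,/tmp/tmpXXXXXX\n')
for logical_path, actual_path in csv.reader(record):
    print(logical_path, '->', actual_path)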

View File

@ -1,167 +0,0 @@
"""Tests for packaging.resources."""
import os
import sys
import shutil
import tempfile
from textwrap import dedent
from packaging.config import get_resources_dests
from packaging.database import disable_cache, enable_cache
from packaging.resources import get_file, get_file_path
from packaging.tests import unittest
from packaging.tests.test_util import GlobTestCaseBase
class DataFilesTestCase(GlobTestCaseBase):
def assertRulesMatch(self, rules, spec):
tempdir = self.build_files_tree(spec)
expected = self.clean_tree(spec)
result = get_resources_dests(tempdir, rules)
self.assertEqual(expected, result)
def clean_tree(self, spec):
files = {}
for path, value in spec.items():
if value is not None:
files[path] = value
return files
def test_simple_glob(self):
rules = [('', '*.tpl', '{data}')]
spec = {'coucou.tpl': '{data}/coucou.tpl',
'Donotwant': None}
self.assertRulesMatch(rules, spec)
def test_multiple_match(self):
rules = [('scripts', '*.bin', '{appdata}'),
('scripts', '*', '{appscript}')]
spec = {'scripts/script.bin': '{appscript}/script.bin',
'Babarlikestrawberry': None}
self.assertRulesMatch(rules, spec)
def test_set_match(self):
rules = [('scripts', '*.{bin,sh}', '{appscript}')]
spec = {'scripts/script.bin': '{appscript}/script.bin',
'scripts/babar.sh': '{appscript}/babar.sh',
'Babarlikestrawberry': None}
self.assertRulesMatch(rules, spec)
def test_set_match_multiple(self):
rules = [('scripts', 'script{s,}.{bin,sh}', '{appscript}')]
spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
'scripts/script.sh': '{appscript}/script.sh',
'Babarlikestrawberry': None}
self.assertRulesMatch(rules, spec)
def test_set_match_exclude(self):
rules = [('scripts', '*', '{appscript}'),
('', os.path.join('**', '*.sh'), None)]
spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
'scripts/script.sh': None,
'Babarlikestrawberry': None}
self.assertRulesMatch(rules, spec)
def test_glob_in_base(self):
rules = [('scrip*', '*.bin', '{appscript}')]
spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
'scripouille/babar.bin': '{appscript}/babar.bin',
'scriptortu/lotus.bin': '{appscript}/lotus.bin',
'Babarlikestrawberry': None}
self.assertRulesMatch(rules, spec)
def test_recursive_glob(self):
rules = [('', os.path.join('**', '*.bin'), '{binary}')]
spec = {'binary0.bin': '{binary}/binary0.bin',
'scripts/binary1.bin': '{binary}/scripts/binary1.bin',
'scripts/bin/binary2.bin': '{binary}/scripts/bin/binary2.bin',
'you/kill/pandabear.guy': None}
self.assertRulesMatch(rules, spec)
def test_final_exemple_glob(self):
rules = [
('mailman/database/schemas/', '*', '{appdata}/schemas'),
('', os.path.join('**', '*.tpl'), '{appdata}/templates'),
('', os.path.join('developer-docs', '**', '*.txt'), '{doc}'),
('', 'README', '{doc}'),
('mailman/etc/', '*', '{config}'),
('mailman/foo/', os.path.join('**', 'bar', '*.cfg'), '{config}/baz'),
('mailman/foo/', os.path.join('**', '*.cfg'), '{config}/hmm'),
('', 'some-new-semantic.sns', '{funky-crazy-category}'),
]
spec = {
'README': '{doc}/README',
'some.tpl': '{appdata}/templates/some.tpl',
'some-new-semantic.sns':
'{funky-crazy-category}/some-new-semantic.sns',
'mailman/database/mailman.db': None,
'mailman/database/schemas/blah.schema':
'{appdata}/schemas/blah.schema',
'mailman/etc/my.cnf': '{config}/my.cnf',
'mailman/foo/some/path/bar/my.cfg':
'{config}/hmm/some/path/bar/my.cfg',
'mailman/foo/some/path/other.cfg':
'{config}/hmm/some/path/other.cfg',
'developer-docs/index.txt': '{doc}/developer-docs/index.txt',
'developer-docs/api/toc.txt': '{doc}/developer-docs/api/toc.txt',
}
self.maxDiff = None
self.assertRulesMatch(rules, spec)
def test_get_file(self):
# Create a fake dist
temp_site_packages = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, temp_site_packages)
dist_name = 'test'
dist_info = os.path.join(temp_site_packages, 'test-0.1.dist-info')
os.mkdir(dist_info)
metadata_path = os.path.join(dist_info, 'METADATA')
resources_path = os.path.join(dist_info, 'RESOURCES')
with open(metadata_path, 'w') as fp:
fp.write(dedent("""\
Metadata-Version: 1.2
Name: test
Version: 0.1
Summary: test
Author: me
"""))
test_path = 'test.cfg'
fd, test_resource_path = tempfile.mkstemp()
os.close(fd)
self.addCleanup(os.remove, test_resource_path)
with open(test_resource_path, 'w') as fp:
fp.write('Config')
with open(resources_path, 'w') as fp:
fp.write('%s,%s' % (test_path, test_resource_path))
# Add fake site-packages to sys.path to retrieve fake dist
self.addCleanup(sys.path.remove, temp_site_packages)
sys.path.insert(0, temp_site_packages)
# Force packaging.database to rescan the sys.path
self.addCleanup(enable_cache)
disable_cache()
# Try to retrieve resources paths and files
self.assertEqual(get_file_path(dist_name, test_path),
test_resource_path)
self.assertRaises(KeyError, get_file_path, dist_name, 'i-dont-exist')
with get_file(dist_name, test_path) as fp:
self.assertEqual(fp.read(), 'Config')
self.assertRaises(KeyError, get_file, dist_name, 'i-dont-exist')
def test_suite():
return unittest.makeSuite(DataFilesTestCase)
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')

View File

@ -1,6 +1,7 @@
"""Tests for the uninstall command."""
import os
import sys
from io import StringIO
from packaging.database import disable_cache, enable_cache
from packaging.run import main
@ -79,7 +80,12 @@ class UninstallTestCase(support.TempdirManager,
if not dirname:
dirname = self.make_dist(name, **kw)
os.chdir(dirname)
dist = self.run_setup('install_dist', '--prefix=' + self.root_dir)
old_err = sys.stderr
sys.stderr = StringIO()
try:
dist = self.run_setup('install_dist', '--prefix=' + self.root_dir)
finally:
sys.stderr = old_err
install_lib = self.get_path(dist, 'purelib')
return dist, install_lib
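
The hunk above silences stderr around run_setup() with a manual save/assign/restore. On Python 3.5 and later the same effect can be had with contextlib.redirect_stderr; a hedged sketch of that alternative:

# Hedged sketch: redirect_stderr requires Python 3.5+, so this is only an
# alternative to the manual save/restore used in the test above.
import io
import sys
from contextlib import redirect_stderr

buf = io.StringIO()
with redirect_stderr(buf):
    print('this goes into the buffer, not the terminal', file=sys.stderr)
print('captured:', buf.getvalue().strip())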

View File

@ -818,51 +818,51 @@ class PackagingLibChecks(support.TempdirManager,
def test_is_setuptools_logs_setup_py_text_found(self):
is_setuptools(self._setuptools_setup_py_pkg())
expected = ['setup.py file found', 'found setuptools text in setup.py']
self.assertEqual(expected, self.get_logs(logging.INFO))
expected = ['setup.py file found.',
'No egg-info directory found.',
'Found setuptools text in setup.py.']
self.assertEqual(expected, self.get_logs(logging.DEBUG))
def test_is_setuptools_logs_setup_py_text_not_found(self):
directory = self._random_setup_py_pkg()
is_setuptools(directory)
info_expected = ['setup.py file found']
warn_expected = ['no egg-info directory found',
'no setuptools text found in setup.py']
self.assertEqual(info_expected, self.get_logs(logging.INFO))
self.assertEqual(warn_expected, self.get_logs(logging.WARN))
expected = ['setup.py file found.', 'No egg-info directory found.',
'No setuptools text found in setup.py.']
self.assertEqual(expected, self.get_logs(logging.DEBUG))
def test_is_setuptools_logs_egg_info_dir_found(self):
is_setuptools(self._setuptools_egg_info_pkg())
expected = ['setup.py file found', 'found egg-info directory']
self.assertEqual(expected, self.get_logs(logging.INFO))
expected = ['setup.py file found.', 'Found egg-info directory.']
self.assertEqual(expected, self.get_logs(logging.DEBUG))
def test_is_distutils_logs_setup_py_text_found(self):
is_distutils(self._distutils_setup_py_pkg())
expected = ['setup.py file found', 'found distutils text in setup.py']
self.assertEqual(expected, self.get_logs(logging.INFO))
expected = ['setup.py file found.',
'No PKG-INFO file found.',
'Found distutils text in setup.py.']
self.assertEqual(expected, self.get_logs(logging.DEBUG))
def test_is_distutils_logs_setup_py_text_not_found(self):
directory = self._random_setup_py_pkg()
is_distutils(directory)
info_expected = ['setup.py file found']
warn_expected = ['no PKG-INFO file found',
'no distutils text found in setup.py']
self.assertEqual(info_expected, self.get_logs(logging.INFO))
self.assertEqual(warn_expected, self.get_logs(logging.WARN))
expected = ['setup.py file found.', 'No PKG-INFO file found.',
'No distutils text found in setup.py.']
self.assertEqual(expected, self.get_logs(logging.DEBUG))
def test_is_distutils_logs_pkg_info_file_found(self):
is_distutils(self._distutils_pkg_info())
expected = ['setup.py file found', 'PKG-INFO file found']
self.assertEqual(expected, self.get_logs(logging.INFO))
expected = ['setup.py file found.', 'PKG-INFO file found.']
self.assertEqual(expected, self.get_logs(logging.DEBUG))
def test_is_packaging_logs_setup_cfg_found(self):
is_packaging(self._valid_setup_cfg_pkg())
expected = ['setup.cfg file found']
self.assertEqual(expected, self.get_logs(logging.INFO))
expected = ['setup.cfg file found.']
self.assertEqual(expected, self.get_logs(logging.DEBUG))
def test_is_packaging_logs_setup_cfg_not_found(self):
is_packaging(self._empty_dir)
expected = ['no setup.cfg file found']
self.assertEqual(expected, self.get_logs(logging.WARN))
expected = ['No setup.cfg file found.']
self.assertEqual(expected, self.get_logs(logging.DEBUG))
def _write_setuptools_setup_py(self, directory):
self.write_file((directory, 'setup.py'),

View File

@ -1224,9 +1224,9 @@ def _has_egg_info(srcdir):
for item in os.listdir(srcdir):
full_path = os.path.join(srcdir, item)
if item.endswith('.egg-info') and os.path.isdir(full_path):
logger.info("found egg-info directory")
logger.debug("Found egg-info directory.")
return True
logger.warning("no egg-info directory found")
logger.debug("No egg-info directory found.")
return False
@ -1243,9 +1243,9 @@ def _has_text(setup_py, installer):
with open(setup_py, 'r', encoding='utf-8') as setup:
for line in setup:
if re.search(installer_pattern, line):
logger.info("found %s text in setup.py", installer)
logger.debug("Found %s text in setup.py.", installer)
return True
logger.warning("no %s text found in setup.py", installer)
logger.debug("No %s text found in setup.py.", installer)
return False
@ -1261,15 +1261,16 @@ def _has_pkg_info(srcdir):
pkg_info = os.path.join(srcdir, 'PKG-INFO')
has_pkg_info = os.path.isfile(pkg_info)
if has_pkg_info:
logger.info("PKG-INFO file found")
logger.warning("no PKG-INFO file found")
logger.debug("PKG-INFO file found.")
else:
logger.debug("No PKG-INFO file found.")
return has_pkg_info
def _has_setup_py(srcdir):
setup_py = os.path.join(srcdir, 'setup.py')
if os.path.isfile(setup_py):
logger.info('setup.py file found')
logger.debug('setup.py file found.')
return True
return False
@ -1277,9 +1278,9 @@ def _has_setup_py(srcdir):
def _has_setup_cfg(srcdir):
setup_cfg = os.path.join(srcdir, 'setup.cfg')
if os.path.isfile(setup_cfg):
logger.info('setup.cfg file found')
logger.debug('setup.cfg file found.')
return True
logger.warning("no setup.cfg file found")
logger.debug("No setup.cfg file found.")
return False
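
The _has_* helpers above back the is_setuptools(), is_distutils() and is_packaging() probes exercised in the test hunks earlier; all three now report their findings at DEBUG level only. A calling sketch with a placeholder checkout path:

# Hedged sketch: 'path/to/checkout' is a placeholder source directory.
from packaging.util import is_distutils, is_packaging, is_setuptools

srcdir = 'path/to/checkout'
print('setuptools project:', is_setuptools(srcdir))
print('distutils project: ', is_distutils(srcdir))
print('packaging project: ', is_packaging(srcdir))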

View File

@ -260,14 +260,14 @@ class WaitTests(unittest.TestCase):
def test_timeout(self):
future1 = self.executor.submit(mul, 6, 7)
future2 = self.executor.submit(time.sleep, 3)
future2 = self.executor.submit(time.sleep, 6)
finished, pending = futures.wait(
[CANCELLED_AND_NOTIFIED_FUTURE,
EXCEPTION_FUTURE,
SUCCESSFUL_FUTURE,
future1, future2],
timeout=1.5,
timeout=5,
return_when=futures.ALL_COMPLETED)
self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE,
@ -357,8 +357,8 @@ class ExecutorTest(unittest.TestCase):
results = []
try:
for i in self.executor.map(time.sleep,
[0, 0, 3],
timeout=1.5):
[0, 0, 6],
timeout=5):
results.append(i)
except futures.TimeoutError:
pass
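
The longer sleeps and timeouts in this last file lean on the documented timeout semantics: futures.wait() simply returns whatever has finished when the timeout expires, while Executor.map() raises TimeoutError if the next result is not ready in time. A standalone sketch of both behaviours:

# Hedged sketch of the wait()/map() timeout behaviour the adjusted tests rely on.
import time
from concurrent import futures

with futures.ThreadPoolExecutor(max_workers=2) as executor:
    fast = executor.submit(pow, 2, 10)
    slow = executor.submit(time.sleep, 6)

    done, pending = futures.wait([fast, slow], timeout=1)
    print(len(done), 'done,', len(pending), 'still pending')

    try:
        for _ in executor.map(time.sleep, [0, 6], timeout=1):
            pass
    except futures.TimeoutError:
        print('map timed out, as expected')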