mirror of
https://github.com/KevinMidboe/Node-Com-Handler.git
synced 2025-10-29 17:50:27 +00:00
Major cleanup, added all old files (python2) to 'old_v0.1' folder
This commit is contained in:
160
flask/lib/python3.4/site-packages/setuptools/__init__.py
Normal file
@@ -0,0 +1,160 @@
"""Extensions to the 'distutils' for large or complex distributions"""

import os
import functools
import distutils.core
import distutils.filelist
from distutils.util import convert_path
from fnmatch import fnmatchcase

from six.moves import filter, map

import setuptools.version
from setuptools.extension import Extension
from setuptools.dist import Distribution, Feature
from setuptools.depends import Require
from . import monkey

__all__ = [
    'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
    'find_packages',
]

__version__ = setuptools.version.__version__

bootstrap_install_from = None

# If we run 2to3 on .py files, should we also convert docstrings?
# Default: yes; assume that we can detect doctests reliably
run_2to3_on_doctests = True
# Standard package names for fixer packages
lib2to3_fixer_packages = ['lib2to3.fixes']


class PackageFinder(object):
    """
    Generate a list of all Python packages found within a directory
    """

    @classmethod
    def find(cls, where='.', exclude=(), include=('*',)):
        """Return a list all Python packages found within directory 'where'

        'where' is the root directory which will be searched for packages.  It
        should be supplied as a "cross-platform" (i.e. URL-style) path; it will
        be converted to the appropriate local path syntax.

        'exclude' is a sequence of package names to exclude; '*' can be used
        as a wildcard in the names, such that 'foo.*' will exclude all
        subpackages of 'foo' (but not 'foo' itself).

        'include' is a sequence of package names to include.  If it's
        specified, only the named packages will be included.  If it's not
        specified, all found packages will be included.  'include' can contain
        shell style wildcard patterns just like 'exclude'.
        """

        return list(cls._find_packages_iter(
            convert_path(where),
            cls._build_filter('ez_setup', '*__pycache__', *exclude),
            cls._build_filter(*include)))

    @classmethod
    def _find_packages_iter(cls, where, exclude, include):
        """
        All the packages found in 'where' that pass the 'include' filter, but
        not the 'exclude' filter.
        """
        for root, dirs, files in os.walk(where, followlinks=True):
            # Copy dirs to iterate over it, then empty dirs.
            all_dirs = dirs[:]
            dirs[:] = []

            for dir in all_dirs:
                full_path = os.path.join(root, dir)
                rel_path = os.path.relpath(full_path, where)
                package = rel_path.replace(os.path.sep, '.')

                # Skip directory trees that are not valid packages
                if ('.' in dir or not cls._looks_like_package(full_path)):
                    continue

                # Should this package be included?
                if include(package) and not exclude(package):
                    yield package

                # Keep searching subdirectories, as there may be more packages
                # down there, even if the parent was excluded.
                dirs.append(dir)

    @staticmethod
    def _looks_like_package(path):
        """Does a directory look like a package?"""
        return os.path.isfile(os.path.join(path, '__init__.py'))

    @staticmethod
    def _build_filter(*patterns):
        """
        Given a list of patterns, return a callable that will be true only if
        the input matches at least one of the patterns.
        """
        return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)


class PEP420PackageFinder(PackageFinder):
    @staticmethod
    def _looks_like_package(path):
        return True


find_packages = PackageFinder.find

setup = distutils.core.setup

_Command = monkey.get_unpatched(distutils.core.Command)


class Command(_Command):
    __doc__ = _Command.__doc__

    command_consumes_arguments = False

    def __init__(self, dist, **kw):
        """
        Construct the command for dist, updating
        vars(self) with any keyword parameters.
        """
        _Command.__init__(self, dist)
        vars(self).update(kw)

    def reinitialize_command(self, command, reinit_subcommands=0, **kw):
        cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
        vars(cmd).update(kw)
        return cmd


def _find_all_simple(path):
    """
    Find all files under 'path'
    """
    results = (
        os.path.join(base, file)
        for base, dirs, files in os.walk(path, followlinks=True)
        for file in files
    )
    return filter(os.path.isfile, results)


def findall(dir=os.curdir):
    """
    Find all files under 'dir' and return the list of full filenames.
    Unless dir is '.', return full filenames with dir prepended.
    """
    files = _find_all_simple(dir)
    if dir == os.curdir:
        make_rel = functools.partial(os.path.relpath, start=dir)
        files = map(make_rel, files)
    return list(files)


monkey.patch_all()
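For reference, a minimal setup.py sketch showing the usual call into the vendored find_packages() above; the project name and exclude patterns are hypothetical. Note that 'tests.*' excludes only the subpackages of 'tests', so both patterns are listed, exactly as the find() docstring describes.

from setuptools import setup, find_packages

setup(
    name='example',  # hypothetical project name
    version='0.1',
    # search '.' for packages, skipping the test tree
    packages=find_packages(where='.', exclude=('tests', 'tests.*')),
)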
Binary file not shown.
173
flask/lib/python3.4/site-packages/setuptools/archive_util.py
Normal file
@@ -0,0 +1,173 @@
"""Utilities for extracting common archive formats"""

import zipfile
import tarfile
import os
import shutil
import posixpath
import contextlib
from distutils.errors import DistutilsError

from pkg_resources import ensure_directory, ContextualZipFile

__all__ = [
    "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
    "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
]


class UnrecognizedFormat(DistutilsError):
    """Couldn't recognize the archive type"""


def default_filter(src, dst):
    """The default progress/filter callback; returns True for all files"""
    return dst


def unpack_archive(filename, extract_dir, progress_filter=default_filter,
                   drivers=None):
    """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``

    `progress_filter` is a function taking two arguments: a source path
    internal to the archive ('/'-separated), and a filesystem path where it
    will be extracted.  The callback must return the desired extract path
    (which may be the same as the one passed in), or else ``None`` to skip
    that file or directory.  The callback can thus be used to report on the
    progress of the extraction, as well as to filter the items extracted or
    alter their extraction paths.

    `drivers`, if supplied, must be a non-empty sequence of functions with the
    same signature as this function (minus the `drivers` argument), that raise
    ``UnrecognizedFormat`` if they do not support extracting the designated
    archive type.  The `drivers` are tried in sequence until one is found that
    does not raise an error, or until all are exhausted (in which case
    ``UnrecognizedFormat`` is raised).  If you do not supply a sequence of
    drivers, the module's ``extraction_drivers`` constant will be used, which
    means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
    order.
    """
    for driver in drivers or extraction_drivers:
        try:
            driver(filename, extract_dir, progress_filter)
        except UnrecognizedFormat:
            continue
        else:
            return
    else:
        raise UnrecognizedFormat(
            "Not a recognized archive type: %s" % filename
        )


def unpack_directory(filename, extract_dir, progress_filter=default_filter):
    """"Unpack" a directory, using the same interface as for archives

    Raises ``UnrecognizedFormat`` if `filename` is not a directory
    """
    if not os.path.isdir(filename):
        raise UnrecognizedFormat("%s is not a directory" % filename)

    paths = {
        filename: ('', extract_dir),
    }
    for base, dirs, files in os.walk(filename):
        src, dst = paths[base]
        for d in dirs:
            paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)
        for f in files:
            target = os.path.join(dst, f)
            target = progress_filter(src + f, target)
            if not target:
                # skip non-files
                continue
            ensure_directory(target)
            f = os.path.join(base, f)
            shutil.copyfile(f, target)
            shutil.copystat(f, target)


def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack zip `filename` to `extract_dir`

    Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
    by ``zipfile.is_zipfile()``).  See ``unpack_archive()`` for an explanation
    of the `progress_filter` argument.
    """

    if not zipfile.is_zipfile(filename):
        raise UnrecognizedFormat("%s is not a zip file" % (filename,))

    with ContextualZipFile(filename) as z:
        for info in z.infolist():
            name = info.filename

            # don't extract absolute paths or ones with .. in them
            if name.startswith('/') or '..' in name.split('/'):
                continue

            target = os.path.join(extract_dir, *name.split('/'))
            target = progress_filter(name, target)
            if not target:
                continue
            if name.endswith('/'):
                # directory
                ensure_directory(target)
            else:
                # file
                ensure_directory(target)
                data = z.read(info.filename)
                with open(target, 'wb') as f:
                    f.write(data)
            unix_attributes = info.external_attr >> 16
            if unix_attributes:
                os.chmod(target, unix_attributes)


def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`

    Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
    by ``tarfile.open()``).  See ``unpack_archive()`` for an explanation
    of the `progress_filter` argument.
    """
    try:
        tarobj = tarfile.open(filename)
    except tarfile.TarError:
        raise UnrecognizedFormat(
            "%s is not a compressed or uncompressed tar file" % (filename,)
        )
    with contextlib.closing(tarobj):
        # don't do any chowning!
        tarobj.chown = lambda *args: None
        for member in tarobj:
            name = member.name
            # don't extract absolute paths or ones with .. in them
            if not name.startswith('/') and '..' not in name.split('/'):
                prelim_dst = os.path.join(extract_dir, *name.split('/'))

                # resolve any links and to extract the link targets as normal
                # files
                while member is not None and (member.islnk() or member.issym()):
                    linkpath = member.linkname
                    if member.issym():
                        base = posixpath.dirname(member.name)
                        linkpath = posixpath.join(base, linkpath)
                        linkpath = posixpath.normpath(linkpath)
                    member = tarobj._getmember(linkpath)

                if member is not None and (member.isfile() or member.isdir()):
                    final_dst = progress_filter(name, prelim_dst)
                    if final_dst:
                        if final_dst.endswith(os.sep):
                            final_dst = final_dst[:-1]
                        try:
                            # XXX Ugh
                            tarobj._extract_member(member, final_dst)
                        except tarfile.ExtractError:
                            # chown/chmod/mkfifo/mknode/makedev failed
                            pass
    return True


extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
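A short sketch of the progress_filter callback protocol documented in unpack_archive() above: the callback sees the archive-internal source path and the proposed destination, may rewrite the destination, and returns None to skip an entry. The archive and output names are hypothetical.

from setuptools.archive_util import unpack_archive

def progress_filter(src, dst):
    # src: '/'-separated path inside the archive; dst: proposed target path
    print('extracting', src)
    if src.endswith('.pyc'):
        return None  # returning None skips this entry
    return dst       # returning a path extracts to that path

unpack_archive('example.tar.gz', 'build/unpacked', progress_filter)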
BIN
flask/lib/python3.4/site-packages/setuptools/cli-32.exe
Normal file
Binary file not shown.
BIN
flask/lib/python3.4/site-packages/setuptools/cli-64.exe
Normal file
Binary file not shown.
BIN
flask/lib/python3.4/site-packages/setuptools/cli.exe
Normal file
Binary file not shown.
17
flask/lib/python3.4/site-packages/setuptools/command/__init__.py
Normal file
@@ -0,0 +1,17 @@
__all__ = [
    'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
    'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
    'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts',
    'register', 'bdist_wininst', 'upload_docs', 'upload', 'build_clib',
]

from distutils.command.bdist import bdist
import sys

from setuptools.command import install_scripts

if 'egg' not in bdist.format_commands:
    bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
    bdist.format_commands.append('egg')

del bdist, sys
Binary file not shown.
80
flask/lib/python3.4/site-packages/setuptools/command/alias.py
Normal file
@@ -0,0 +1,80 @@
from distutils.errors import DistutilsOptionError

from six.moves import map

from setuptools.command.setopt import edit_config, option_base, config_file


def shquote(arg):
    """Quote an argument for later parsing by shlex.split()"""
    for c in '"', "'", "\\", "#":
        if c in arg:
            return repr(arg)
    if arg.split() != [arg]:
        return repr(arg)
    return arg


class alias(option_base):
    """Define a shortcut that invokes one or more commands"""

    description = "define a shortcut to invoke one or more commands"
    command_consumes_arguments = True

    user_options = [
        ('remove', 'r', 'remove (unset) the alias'),
    ] + option_base.user_options

    boolean_options = option_base.boolean_options + ['remove']

    def initialize_options(self):
        option_base.initialize_options(self)
        self.args = None
        self.remove = None

    def finalize_options(self):
        option_base.finalize_options(self)
        if self.remove and len(self.args) != 1:
            raise DistutilsOptionError(
                "Must specify exactly one argument (the alias name) when "
                "using --remove"
            )

    def run(self):
        aliases = self.distribution.get_option_dict('aliases')

        if not self.args:
            print("Command Aliases")
            print("---------------")
            for alias in aliases:
                print("setup.py alias", format_alias(alias, aliases))
            return

        elif len(self.args) == 1:
            alias, = self.args
            if self.remove:
                command = None
            elif alias in aliases:
                print("setup.py alias", format_alias(alias, aliases))
                return
            else:
                print("No alias definition found for %r" % alias)
                return
        else:
            alias = self.args[0]
            command = ' '.join(map(shquote, self.args[1:]))

        edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)


def format_alias(name, aliases):
    source, command = aliases[name]
    if source == config_file('global'):
        source = '--global-config '
    elif source == config_file('user'):
        source = '--user-config '
    elif source == config_file('local'):
        source = ''
    else:
        source = '--filename=%r' % source
    return source + name + ' ' + command
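A small sketch of the quoting contract that shquote() above is built around: anything it quotes must survive a round trip through shlex.split(), which is how the stored alias command string is later re-parsed. The argument list is hypothetical.

import shlex

from setuptools.command.alias import shquote

args = ['test', "it's tricky", 'plain']
command = ' '.join(map(shquote, args))  # args with quotes/whitespace get repr()-quoted
assert shlex.split(command) == args     # round-trips back to the original list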
472
flask/lib/python3.4/site-packages/setuptools/command/bdist_egg.py
Normal file
@@ -0,0 +1,472 @@
"""setuptools.command.bdist_egg

Build .egg distributions"""

from distutils.errors import DistutilsSetupError
from distutils.dir_util import remove_tree, mkpath
from distutils import log
from types import CodeType
import sys
import os
import textwrap
import marshal

import six

from pkg_resources import get_build_platform, Distribution, ensure_directory
from pkg_resources import EntryPoint
from setuptools.extension import Library
from setuptools import Command

try:
    # Python 2.7 or >=3.2
    from sysconfig import get_path, get_python_version

    def _get_purelib():
        return get_path("purelib")
except ImportError:
    from distutils.sysconfig import get_python_lib, get_python_version

    def _get_purelib():
        return get_python_lib(False)


def strip_module(filename):
    if '.' in filename:
        filename = os.path.splitext(filename)[0]
    if filename.endswith('module'):
        filename = filename[:-6]
    return filename


def write_stub(resource, pyfile):
    _stub_template = textwrap.dedent("""
        def __bootstrap__():
            global __bootstrap__, __loader__, __file__
            import sys, pkg_resources, imp
            __file__ = pkg_resources.resource_filename(__name__, %r)
            __loader__ = None; del __bootstrap__, __loader__
            imp.load_dynamic(__name__,__file__)
        __bootstrap__()
        """).lstrip()
    with open(pyfile, 'w') as f:
        f.write(_stub_template % resource)


class bdist_egg(Command):
    description = "create an \"egg\" distribution"

    user_options = [
        ('bdist-dir=', 'b',
         "temporary directory for creating the distribution"),
        ('plat-name=', 'p', "platform name to embed in generated filenames "
                            "(default: %s)" % get_build_platform()),
        ('exclude-source-files', None,
         "remove all .py files from the generated egg"),
        ('keep-temp', 'k',
         "keep the pseudo-installation tree around after " +
         "creating the distribution archive"),
        ('dist-dir=', 'd',
         "directory to put final built distributions in"),
        ('skip-build', None,
         "skip rebuilding everything (for testing/debugging)"),
    ]

    boolean_options = [
        'keep-temp', 'skip-build', 'exclude-source-files'
    ]

    def initialize_options(self):
        self.bdist_dir = None
        self.plat_name = None
        self.keep_temp = 0
        self.dist_dir = None
        self.skip_build = 0
        self.egg_output = None
        self.exclude_source_files = None

    def finalize_options(self):
        ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
        self.egg_info = ei_cmd.egg_info

        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'egg')

        if self.plat_name is None:
            self.plat_name = get_build_platform()

        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))

        if self.egg_output is None:

            # Compute filename of the output egg
            basename = Distribution(
                None, None, ei_cmd.egg_name, ei_cmd.egg_version,
                get_python_version(),
                self.distribution.has_ext_modules() and self.plat_name
            ).egg_name()

            self.egg_output = os.path.join(self.dist_dir, basename + '.egg')

    def do_install_data(self):
        # Hack for packages that install data to install's --install-lib
        self.get_finalized_command('install').install_lib = self.bdist_dir

        site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
        old, self.distribution.data_files = self.distribution.data_files, []

        for item in old:
            if isinstance(item, tuple) and len(item) == 2:
                if os.path.isabs(item[0]):
                    realpath = os.path.realpath(item[0])
                    normalized = os.path.normcase(realpath)
                    if normalized == site_packages or normalized.startswith(
                            site_packages + os.sep
                    ):
                        item = realpath[len(site_packages) + 1:], item[1]
                        # XXX else: raise ???
            self.distribution.data_files.append(item)

        try:
            log.info("installing package data to %s", self.bdist_dir)
            self.call_command('install_data', force=0, root=None)
        finally:
            self.distribution.data_files = old

    def get_outputs(self):
        return [self.egg_output]

    def call_command(self, cmdname, **kw):
        """Invoke reinitialized command `cmdname` with keyword args"""
        for dirname in INSTALL_DIRECTORY_ATTRS:
            kw.setdefault(dirname, self.bdist_dir)
        kw.setdefault('skip_build', self.skip_build)
        kw.setdefault('dry_run', self.dry_run)
        cmd = self.reinitialize_command(cmdname, **kw)
        self.run_command(cmdname)
        return cmd

    def run(self):
        # Generate metadata first
        self.run_command("egg_info")
        # We run install_lib before install_data, because some data hacks
        # pull their data path from the install_lib command.
        log.info("installing library code to %s", self.bdist_dir)
        instcmd = self.get_finalized_command('install')
        old_root = instcmd.root
        instcmd.root = None
        if self.distribution.has_c_libraries() and not self.skip_build:
            self.run_command('build_clib')
        cmd = self.call_command('install_lib', warn_dir=0)
        instcmd.root = old_root

        all_outputs, ext_outputs = self.get_ext_outputs()
        self.stubs = []
        to_compile = []
        for (p, ext_name) in enumerate(ext_outputs):
            filename, ext = os.path.splitext(ext_name)
            pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
                                  '.py')
            self.stubs.append(pyfile)
            log.info("creating stub loader for %s", ext_name)
            if not self.dry_run:
                write_stub(os.path.basename(ext_name), pyfile)
            to_compile.append(pyfile)
            ext_outputs[p] = ext_name.replace(os.sep, '/')

        if to_compile:
            cmd.byte_compile(to_compile)
        if self.distribution.data_files:
            self.do_install_data()

        # Make the EGG-INFO directory
        archive_root = self.bdist_dir
        egg_info = os.path.join(archive_root, 'EGG-INFO')
        self.mkpath(egg_info)
        if self.distribution.scripts:
            script_dir = os.path.join(egg_info, 'scripts')
            log.info("installing scripts to %s", script_dir)
            self.call_command('install_scripts', install_dir=script_dir,
                              no_ep=1)

        self.copy_metadata_to(egg_info)
        native_libs = os.path.join(egg_info, "native_libs.txt")
        if all_outputs:
            log.info("writing %s", native_libs)
            if not self.dry_run:
                ensure_directory(native_libs)
                libs_file = open(native_libs, 'wt')
                libs_file.write('\n'.join(all_outputs))
                libs_file.write('\n')
                libs_file.close()
        elif os.path.isfile(native_libs):
            log.info("removing %s", native_libs)
            if not self.dry_run:
                os.unlink(native_libs)

        write_safety_flag(
            os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
        )

        if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
            log.warn(
                "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
                "Use the install_requires/extras_require setup() args instead."
            )

        if self.exclude_source_files:
            self.zap_pyfiles()

        # Make the archive
        make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
                     dry_run=self.dry_run, mode=self.gen_header())
        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)

        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, 'dist_files', []).append(
            ('bdist_egg', get_python_version(), self.egg_output))

    def zap_pyfiles(self):
        log.info("Removing .py files from temporary directory")
        for base, dirs, files in walk_egg(self.bdist_dir):
            for name in files:
                if name.endswith('.py'):
                    path = os.path.join(base, name)
                    log.debug("Deleting %s", path)
                    os.unlink(path)

    def zip_safe(self):
        safe = getattr(self.distribution, 'zip_safe', None)
        if safe is not None:
            return safe
        log.warn("zip_safe flag not set; analyzing archive contents...")
        return analyze_egg(self.bdist_dir, self.stubs)

    def gen_header(self):
        epm = EntryPoint.parse_map(self.distribution.entry_points or '')
        ep = epm.get('setuptools.installation', {}).get('eggsecutable')
        if ep is None:
            return 'w'  # not an eggsecutable, do it the usual way.

        if not ep.attrs or ep.extras:
            raise DistutilsSetupError(
                "eggsecutable entry point (%r) cannot have 'extras' "
                "or refer to a module" % (ep,)
            )

        pyver = sys.version[:3]
        pkg = ep.module_name
        full = '.'.join(ep.attrs)
        base = ep.attrs[0]
        basename = os.path.basename(self.egg_output)

        header = (
            "#!/bin/sh\n"
            'if [ `basename $0` = "%(basename)s" ]\n'
            'then exec python%(pyver)s -c "'
            "import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
            "from %(pkg)s import %(base)s; sys.exit(%(full)s())"
            '" "$@"\n'
            'else\n'
            '  echo $0 is not the correct name for this egg file.\n'
            '  echo Please rename it back to %(basename)s and try again.\n'
            '  exec false\n'
            'fi\n'
        ) % locals()

        if not self.dry_run:
            mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
            f = open(self.egg_output, 'w')
            f.write(header)
            f.close()
        return 'a'

    def copy_metadata_to(self, target_dir):
        "Copy metadata (egg info) to the target_dir"
        # normalize the path (so that a forward-slash in egg_info will
        # match using startswith below)
        norm_egg_info = os.path.normpath(self.egg_info)
        prefix = os.path.join(norm_egg_info, '')
        for path in self.ei_cmd.filelist.files:
            if path.startswith(prefix):
                target = os.path.join(target_dir, path[len(prefix):])
                ensure_directory(target)
                self.copy_file(path, target)

    def get_ext_outputs(self):
        """Get a list of relative paths to C extensions in the output distro"""

        all_outputs = []
        ext_outputs = []

        paths = {self.bdist_dir: ''}
        for base, dirs, files in os.walk(self.bdist_dir):
            for filename in files:
                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
                    all_outputs.append(paths[base] + filename)
            for filename in dirs:
                paths[os.path.join(base, filename)] = (paths[base] +
                                                       filename + '/')

        if self.distribution.has_ext_modules():
            build_cmd = self.get_finalized_command('build_ext')
            for ext in build_cmd.extensions:
                if isinstance(ext, Library):
                    continue
                fullname = build_cmd.get_ext_fullname(ext.name)
                filename = build_cmd.get_ext_filename(fullname)
                if not os.path.basename(filename).startswith('dl-'):
                    if os.path.exists(os.path.join(self.bdist_dir, filename)):
                        ext_outputs.append(filename)

        return all_outputs, ext_outputs


NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())


def walk_egg(egg_dir):
    """Walk an unpacked egg's contents, skipping the metadata directory"""
    walker = os.walk(egg_dir)
    base, dirs, files = next(walker)
    if 'EGG-INFO' in dirs:
        dirs.remove('EGG-INFO')
    yield base, dirs, files
    for bdf in walker:
        yield bdf


def analyze_egg(egg_dir, stubs):
    # check for existing flag in EGG-INFO
    for flag, fn in safety_flags.items():
        if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
            return flag
    if not can_scan():
        return False
    safe = True
    for base, dirs, files in walk_egg(egg_dir):
        for name in files:
            if name.endswith('.py') or name.endswith('.pyw'):
                continue
            elif name.endswith('.pyc') or name.endswith('.pyo'):
                # always scan, even if we already know we're not safe
                safe = scan_module(egg_dir, base, name, stubs) and safe
    return safe


def write_safety_flag(egg_dir, safe):
    # Write or remove zip safety flag file(s)
    for flag, fn in safety_flags.items():
        fn = os.path.join(egg_dir, fn)
        if os.path.exists(fn):
            if safe is None or bool(safe) != flag:
                os.unlink(fn)
        elif safe is not None and bool(safe) == flag:
            f = open(fn, 'wt')
            f.write('\n')
            f.close()


safety_flags = {
    True: 'zip-safe',
    False: 'not-zip-safe',
}


def scan_module(egg_dir, base, name, stubs):
    """Check whether module possibly uses unsafe-for-zipfile stuff"""

    filename = os.path.join(base, name)
    if filename[:-1] in stubs:
        return True  # Extension module
    pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
    module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
    if sys.version_info < (3, 3):
        skip = 8  # skip magic & date
    else:
        skip = 12  # skip magic & date & file size
    f = open(filename, 'rb')
    f.read(skip)
    code = marshal.load(f)
    f.close()
    safe = True
    symbols = dict.fromkeys(iter_symbols(code))
    for bad in ['__file__', '__path__']:
        if bad in symbols:
            log.warn("%s: module references %s", module, bad)
            safe = False
    if 'inspect' in symbols:
        for bad in [
            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
            'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
            'getinnerframes', 'getouterframes', 'stack', 'trace'
        ]:
            if bad in symbols:
                log.warn("%s: module MAY be using inspect.%s", module, bad)
                safe = False
    return safe


def iter_symbols(code):
    """Yield names and strings used by `code` and its nested code objects"""
    for name in code.co_names:
        yield name
    for const in code.co_consts:
        if isinstance(const, six.string_types):
            yield const
        elif isinstance(const, CodeType):
            for name in iter_symbols(const):
                yield name


def can_scan():
    if not sys.platform.startswith('java') and sys.platform != 'cli':
        # CPython, PyPy, etc.
        return True
    log.warn("Unable to analyze compiled code on this platform.")
    log.warn("Please ask the author to include a 'zip_safe'"
             " setting (either True or False) in the package's setup.py")


# Attribute names of options for commands that might need to be convinced to
# install to the egg build directory

INSTALL_DIRECTORY_ATTRS = [
    'install_lib', 'install_dir', 'install_data', 'install_base'
]


def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,
                 mode='w'):
    """Create a zip file from all the files under 'base_dir'.  The output
    zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
    Python module (if available) or the InfoZIP "zip" utility (if installed
    and found on the default search path).  If neither tool is available,
    raises DistutilsExecError.  Returns the name of the output zip file.
    """
    import zipfile

    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

    def visit(z, dirname, names):
        for name in names:
            path = os.path.normpath(os.path.join(dirname, name))
            if os.path.isfile(path):
                p = path[len(base_dir) + 1:]
                if not dry_run:
                    z.write(path, p)
                log.debug("adding '%s'", p)

    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    if not dry_run:
        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
        for dirname, dirs, files in os.walk(base_dir):
            visit(z, dirname, files)
        z.close()
    else:
        for dirname, dirs, files in os.walk(base_dir):
            visit(None, dirname, files)
    return zip_filename
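A minimal sketch of what the zip-safety scan above actually detects: iter_symbols() walks a code object (and its nested code objects) and yields every name and string constant, so a reference to __file__ surfaces as a symbol. The compiled source string is hypothetical.

from setuptools.command.bdist_egg import iter_symbols

code = compile("data = open(__file__).read()", '<example>', 'exec')
symbols = set(iter_symbols(code))
assert '__file__' in symbols  # exactly what scan_module() flags as unsafe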
43
flask/lib/python3.4/site-packages/setuptools/command/bdist_rpm.py
Normal file
@@ -0,0 +1,43 @@
import distutils.command.bdist_rpm as orig


class bdist_rpm(orig.bdist_rpm):
    """
    Override the default bdist_rpm behavior to do the following:

    1. Run egg_info to ensure the name and version are properly calculated.
    2. Always run 'install' using --single-version-externally-managed to
       disable eggs in RPM distributions.
    3. Replace dash with underscore in the version numbers for better RPM
       compatibility.
    """

    def run(self):
        # ensure distro name is up-to-date
        self.run_command('egg_info')

        orig.bdist_rpm.run(self)

    def _make_spec_file(self):
        version = self.distribution.get_version()
        rpmversion = version.replace('-', '_')
        spec = orig.bdist_rpm._make_spec_file(self)
        line23 = '%define version ' + version
        line24 = '%define version ' + rpmversion
        spec = [
            line.replace(
                "Source0: %{name}-%{version}.tar",
                "Source0: %{name}-%{unmangled_version}.tar"
            ).replace(
                "setup.py install ",
                "setup.py install --single-version-externally-managed "
            ).replace(
                "%setup",
                "%setup -n %{name}-%{unmangled_version}"
            ).replace(line23, line24)
            for line in spec
        ]
        insert_loc = spec.index(line24) + 1
        unmangled_version = "%define unmangled_version " + version
        spec.insert(insert_loc, unmangled_version)
        return spec
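A worked example of the version mangling in _make_spec_file() above, with a hypothetical pre-release version: RPM forbids '-' in the Version tag, so the dash becomes an underscore, while the original value survives as the %{unmangled_version} macro used for the Source0 and %setup paths.

version = '1.0-dev'
rpmversion = version.replace('-', '_')
assert rpmversion == '1.0_dev'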
21
flask/lib/python3.4/site-packages/setuptools/command/bdist_wininst.py
Normal file
@@ -0,0 +1,21 @@
import distutils.command.bdist_wininst as orig


class bdist_wininst(orig.bdist_wininst):
    def reinitialize_command(self, command, reinit_subcommands=0):
        """
        Supplement reinitialize_command to work around
        http://bugs.python.org/issue20819
        """
        cmd = self.distribution.reinitialize_command(
            command, reinit_subcommands)
        if command in ('install', 'install_lib'):
            cmd.install_lib = None
        return cmd

    def run(self):
        self._is_running = True
        try:
            orig.bdist_wininst.run(self)
        finally:
            self._is_running = False
98
flask/lib/python3.4/site-packages/setuptools/command/build_clib.py
Normal file
@@ -0,0 +1,98 @@
import distutils.command.build_clib as orig
from distutils.errors import DistutilsSetupError
from distutils import log
from setuptools.dep_util import newer_pairwise_group


class build_clib(orig.build_clib):
    """
    Override the default build_clib behaviour to do the following:

    1. Implement a rudimentary timestamp-based dependency system
       so 'compile()' doesn't run every time.
    2. Add more keys to the 'build_info' dictionary:
        * obj_deps - specify dependencies for each object compiled.
                     this should be a dictionary mapping a key
                     with the source filename to a list of
                     dependencies. Use an empty string for global
                     dependencies.
        * cflags   - specify a list of additional flags to pass to
                     the compiler.
    """

    def build_libraries(self, libraries):
        for (lib_name, build_info) in libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'sources' must be present and must be "
                    "a list of source filenames" % lib_name)
            sources = list(sources)

            log.info("building '%s' library", lib_name)

            # Make sure everything is the correct type.
            # obj_deps should be a dictionary of keys as sources
            # and a list/tuple of files that are its dependencies.
            obj_deps = build_info.get('obj_deps', dict())
            if not isinstance(obj_deps, dict):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'obj_deps' must be a dictionary of "
                    "type 'source: list'" % lib_name)
            dependencies = []

            # Get the global dependencies that are specified by the '' key.
            # These will go into every source's dependency list.
            global_deps = obj_deps.get('', list())
            if not isinstance(global_deps, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'obj_deps' must be a dictionary of "
                    "type 'source: list'" % lib_name)

            # Build the list to be used by newer_pairwise_group
            # each source will be auto-added to its dependencies.
            for source in sources:
                src_deps = [source]
                src_deps.extend(global_deps)
                extra_deps = obj_deps.get(source, list())
                if not isinstance(extra_deps, (list, tuple)):
                    raise DistutilsSetupError(
                        "in 'libraries' option (library '%s'), "
                        "'obj_deps' must be a dictionary of "
                        "type 'source: list'" % lib_name)
                src_deps.extend(extra_deps)
                dependencies.append(src_deps)

            expected_objects = self.compiler.object_filenames(
                sources,
                output_dir=self.build_temp
            )

            if newer_pairwise_group(dependencies, expected_objects) != ([], []):
                # First, compile the source code to object files in the library
                # directory.  (This should probably change to putting object
                # files in a temporary build directory.)
                macros = build_info.get('macros')
                include_dirs = build_info.get('include_dirs')
                cflags = build_info.get('cflags')
                objects = self.compiler.compile(
                    sources,
                    output_dir=self.build_temp,
                    macros=macros,
                    include_dirs=include_dirs,
                    extra_postargs=cflags,
                    debug=self.debug
                )

            # Now "link" the object files together into a static library.
            # (On Unix at least, this isn't really linking -- it just
            # builds an archive.  Whatever.)
            self.compiler.create_static_lib(
                expected_objects,
                lib_name,
                output_dir=self.build_clib,
                debug=self.debug
            )
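A hypothetical 'libraries' entry exercising the extra build_info keys this subclass documents; the '' key in obj_deps carries the global dependencies added to every source, and cflags is passed through to the compiler as extra flags.

libraries = [
    ('example', {
        'sources': ['src/a.c', 'src/b.c'],
        'obj_deps': {
            '': ['include/common.h'],    # global: every object depends on it
            'src/a.c': ['include/a.h'],  # extra dependency of a.c only
        },
        'cflags': ['-O2'],
        'include_dirs': ['include'],
    }),
]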
328
flask/lib/python3.4/site-packages/setuptools/command/build_ext.py
Normal file
@@ -0,0 +1,328 @@
import os
import sys
import itertools
import imp
from distutils.command.build_ext import build_ext as _du_build_ext
from distutils.file_util import copy_file
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler, get_config_var
from distutils.errors import DistutilsError
from distutils import log

from setuptools.extension import Library
import six

try:
    # Attempt to use Cython for building extensions, if available
    from Cython.Distutils.build_ext import build_ext as _build_ext
except ImportError:
    _build_ext = _du_build_ext

# make sure _config_vars is initialized
get_config_var("LDSHARED")
from distutils.sysconfig import _config_vars as _CONFIG_VARS


def _customize_compiler_for_shlib(compiler):
    if sys.platform == "darwin":
        # building .dylib requires additional compiler flags on OSX; here we
        # temporarily substitute the pyconfig.h variables so that distutils'
        # 'customize_compiler' uses them before we build the shared libraries.
        tmp = _CONFIG_VARS.copy()
        try:
            # XXX Help! I don't have any idea whether these are right...
            _CONFIG_VARS['LDSHARED'] = (
                "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup")
            _CONFIG_VARS['CCSHARED'] = " -dynamiclib"
            _CONFIG_VARS['SO'] = ".dylib"
            customize_compiler(compiler)
        finally:
            _CONFIG_VARS.clear()
            _CONFIG_VARS.update(tmp)
    else:
        customize_compiler(compiler)


have_rtld = False
use_stubs = False
libtype = 'shared'

if sys.platform == "darwin":
    use_stubs = True
elif os.name != 'nt':
    try:
        import dl
        use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
    except ImportError:
        pass

if_dl = lambda s: s if have_rtld else ''


def get_abi3_suffix():
    """Return the file extension for an abi3-compliant Extension()"""
    for suffix, _, _ in (s for s in imp.get_suffixes() if s[2] == imp.C_EXTENSION):
        if '.abi3' in suffix:  # Unix
            return suffix
        elif suffix == '.pyd':  # Windows
            return suffix


class build_ext(_build_ext):
    def run(self):
        """Build extensions in build directory, then copy if --inplace"""
        old_inplace, self.inplace = self.inplace, 0
        _build_ext.run(self)
        self.inplace = old_inplace
        if old_inplace:
            self.copy_extensions_to_source()

    def copy_extensions_to_source(self):
        build_py = self.get_finalized_command('build_py')
        for ext in self.extensions:
            fullname = self.get_ext_fullname(ext.name)
            filename = self.get_ext_filename(fullname)
            modpath = fullname.split('.')
            package = '.'.join(modpath[:-1])
            package_dir = build_py.get_package_dir(package)
            dest_filename = os.path.join(package_dir,
                                         os.path.basename(filename))
            src_filename = os.path.join(self.build_lib, filename)

            # Always copy, even if source is older than destination, to ensure
            # that the right extensions for the current Python/platform are
            # used.
            copy_file(
                src_filename, dest_filename, verbose=self.verbose,
                dry_run=self.dry_run
            )
            if ext._needs_stub:
                self.write_stub(package_dir or os.curdir, ext, True)

    def get_ext_filename(self, fullname):
        filename = _build_ext.get_ext_filename(self, fullname)
        if fullname in self.ext_map:
            ext = self.ext_map[fullname]
            use_abi3 = (
                six.PY3
                and getattr(ext, 'py_limited_api')
                and get_abi3_suffix()
            )
            if use_abi3:
                so_ext = _get_config_var_837('EXT_SUFFIX')
                filename = filename[:-len(so_ext)]
                filename = filename + get_abi3_suffix()
            if isinstance(ext, Library):
                fn, ext = os.path.splitext(filename)
                return self.shlib_compiler.library_filename(fn, libtype)
            elif use_stubs and ext._links_to_dynamic:
                d, fn = os.path.split(filename)
                return os.path.join(d, 'dl-' + fn)
        return filename

    def initialize_options(self):
        _build_ext.initialize_options(self)
        self.shlib_compiler = None
        self.shlibs = []
        self.ext_map = {}

    def finalize_options(self):
        _build_ext.finalize_options(self)
        self.extensions = self.extensions or []
        self.check_extensions_list(self.extensions)
        self.shlibs = [ext for ext in self.extensions
                       if isinstance(ext, Library)]
        if self.shlibs:
            self.setup_shlib_compiler()
        for ext in self.extensions:
            ext._full_name = self.get_ext_fullname(ext.name)
        for ext in self.extensions:
            fullname = ext._full_name
            self.ext_map[fullname] = ext

            # distutils 3.1 will also ask for module names
            # XXX what to do with conflicts?
            self.ext_map[fullname.split('.')[-1]] = ext

            ltd = self.shlibs and self.links_to_dynamic(ext) or False
            ns = ltd and use_stubs and not isinstance(ext, Library)
            ext._links_to_dynamic = ltd
            ext._needs_stub = ns
            filename = ext._file_name = self.get_ext_filename(fullname)
            libdir = os.path.dirname(os.path.join(self.build_lib, filename))
            if ltd and libdir not in ext.library_dirs:
                ext.library_dirs.append(libdir)
            if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
                ext.runtime_library_dirs.append(os.curdir)

    def setup_shlib_compiler(self):
        compiler = self.shlib_compiler = new_compiler(
            compiler=self.compiler, dry_run=self.dry_run, force=self.force
        )
        _customize_compiler_for_shlib(compiler)

        if self.include_dirs is not None:
            compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for (name, value) in self.define:
                compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                compiler.undefine_macro(macro)
        if self.libraries is not None:
            compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            compiler.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            compiler.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            compiler.set_link_objects(self.link_objects)

        # hack so distutils' build_extension() builds a library instead
        compiler.link_shared_object = link_shared_object.__get__(compiler)

    def get_export_symbols(self, ext):
        if isinstance(ext, Library):
            return ext.export_symbols
        return _build_ext.get_export_symbols(self, ext)

    def build_extension(self, ext):
        ext._convert_pyx_sources_to_lang()
        _compiler = self.compiler
        try:
            if isinstance(ext, Library):
                self.compiler = self.shlib_compiler
            _build_ext.build_extension(self, ext)
            if ext._needs_stub:
                cmd = self.get_finalized_command('build_py').build_lib
                self.write_stub(cmd, ext)
        finally:
            self.compiler = _compiler

    def links_to_dynamic(self, ext):
        """Return true if 'ext' links to a dynamic lib in the same package"""
        # XXX this should check to ensure the lib is actually being built
        # XXX as dynamic, and not just using a locally-found version or a
        # XXX static-compiled version
        libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
        pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
        return any(pkg + libname in libnames for libname in ext.libraries)

    def get_outputs(self):
        return _build_ext.get_outputs(self) + self.__get_stubs_outputs()

    def __get_stubs_outputs(self):
        # assemble the base name for each extension that needs a stub
        ns_ext_bases = (
            os.path.join(self.build_lib, *ext._full_name.split('.'))
            for ext in self.extensions
            if ext._needs_stub
        )
        # pair each base with the extension
        pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
        return list(base + fnext for base, fnext in pairs)

    def __get_output_extensions(self):
        yield '.py'
        yield '.pyc'
        if self.get_finalized_command('build_py').optimize:
            yield '.pyo'

    def write_stub(self, output_dir, ext, compile=False):
        log.info("writing stub loader for %s to %s", ext._full_name,
                 output_dir)
        stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) +
                     '.py')
        if compile and os.path.exists(stub_file):
            raise DistutilsError(stub_file + " already exists! Please delete.")
        if not self.dry_run:
            f = open(stub_file, 'w')
            f.write(
                '\n'.join([
                    "def __bootstrap__():",
                    "   global __bootstrap__, __file__, __loader__",
                    "   import sys, os, pkg_resources, imp" + if_dl(", dl"),
                    "   __file__ = pkg_resources.resource_filename"
                    "(__name__,%r)"
                    % os.path.basename(ext._file_name),
                    "   del __bootstrap__",
                    "   if '__loader__' in globals():",
                    "       del __loader__",
                    if_dl("   old_flags = sys.getdlopenflags()"),
                    "   old_dir = os.getcwd()",
                    "   try:",
                    "     os.chdir(os.path.dirname(__file__))",
                    if_dl("     sys.setdlopenflags(dl.RTLD_NOW)"),
                    "     imp.load_dynamic(__name__,__file__)",
                    "   finally:",
                    if_dl("     sys.setdlopenflags(old_flags)"),
                    "     os.chdir(old_dir)",
                    "__bootstrap__()",
                    ""  # terminal \n
                ])
            )
            f.close()
        if compile:
            from distutils.util import byte_compile

            byte_compile([stub_file], optimize=0,
                         force=True, dry_run=self.dry_run)
            optimize = self.get_finalized_command('install_lib').optimize
            if optimize > 0:
                byte_compile([stub_file], optimize=optimize,
                             force=True, dry_run=self.dry_run)
            if os.path.exists(stub_file) and not self.dry_run:
                os.unlink(stub_file)


if use_stubs or os.name == 'nt':
    # Build shared libraries
    #
    def link_shared_object(
            self, objects, output_libname, output_dir=None, libraries=None,
            library_dirs=None, runtime_library_dirs=None, export_symbols=None,
            debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
            target_lang=None):
        self.link(
            self.SHARED_LIBRARY, objects, output_libname,
            output_dir, libraries, library_dirs, runtime_library_dirs,
            export_symbols, debug, extra_preargs, extra_postargs,
            build_temp, target_lang
        )
else:
    # Build static libraries everywhere else
    libtype = 'static'

    def link_shared_object(
            self, objects, output_libname, output_dir=None, libraries=None,
            library_dirs=None, runtime_library_dirs=None, export_symbols=None,
            debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
            target_lang=None):
        # XXX we need to either disallow these attrs on Library instances,
        # or warn/abort here if set, or something...
        # libraries=None, library_dirs=None, runtime_library_dirs=None,
        # export_symbols=None, extra_preargs=None, extra_postargs=None,
        # build_temp=None

        assert output_dir is None  # distutils build_ext doesn't pass this
        output_dir, filename = os.path.split(output_libname)
        basename, ext = os.path.splitext(filename)
        if self.library_filename("x").startswith('lib'):
            # strip 'lib' prefix; this is kludgy if some platform uses
            # a different prefix
            basename = basename[3:]

        self.create_static_lib(
            objects, basename, output_dir, debug, target_lang
        )


def _get_config_var_837(name):
    """
    In https://github.com/pypa/setuptools/pull/837, we discovered
    Python 3.3.0 exposes the extension suffix under the name 'SO'.
    """
    if sys.version_info < (3, 3, 1):
        name = 'SO'
    return get_config_var(name)
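A minimal sketch of the stable-ABI path through get_ext_filename() above: an Extension built with py_limited_api=True gets its versioned suffix swapped for the '.abi3' suffix from get_abi3_suffix() on Python 3. Module and source names are hypothetical.

from setuptools import setup, Extension

setup(
    name='example',
    version='0.1',
    ext_modules=[
        Extension('example._speedups', ['src/_speedups.c'],
                  py_limited_api=True),
    ],
)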
270
flask/lib/python3.4/site-packages/setuptools/command/build_py.py
Normal file
@@ -0,0 +1,270 @@
|
||||
from glob import glob
|
||||
from distutils.util import convert_path
|
||||
import distutils.command.build_py as orig
|
||||
import os
|
||||
import fnmatch
|
||||
import textwrap
|
||||
import io
|
||||
import distutils.errors
|
||||
import itertools
|
||||
|
||||
import six
|
||||
from six.moves import map, filter, filterfalse
|
||||
|
||||
try:
|
||||
from setuptools.lib2to3_ex import Mixin2to3
|
||||
except ImportError:
|
||||
|
||||
class Mixin2to3:
|
||||
def run_2to3(self, files, doctests=True):
|
||||
"do nothing"
|
||||
|
||||
|
||||
class build_py(orig.build_py, Mixin2to3):
|
||||
"""Enhanced 'build_py' command that includes data files with packages
|
||||
|
||||
The data files are specified via a 'package_data' argument to 'setup()'.
|
||||
See 'setuptools.dist.Distribution' for more details.
|
||||
|
||||
Also, this version of the 'build_py' command allows you to specify both
|
||||
'py_modules' and 'packages' in the same setup operation.
|
||||
"""
|
||||
|
||||
def finalize_options(self):
|
||||
orig.build_py.finalize_options(self)
|
||||
self.package_data = self.distribution.package_data
|
||||
self.exclude_package_data = (self.distribution.exclude_package_data or
|
||||
{})
|
||||
if 'data_files' in self.__dict__:
|
||||
del self.__dict__['data_files']
|
||||
self.__updated_files = []
|
||||
self.__doctests_2to3 = []
|
||||
|
||||
def run(self):
|
||||
"""Build modules, packages, and copy data files to build directory"""
|
||||
if not self.py_modules and not self.packages:
|
||||
return
|
||||
|
||||
if self.py_modules:
|
||||
self.build_modules()
|
||||
|
||||
if self.packages:
|
||||
self.build_packages()
|
||||
self.build_package_data()
|
||||
|
||||
self.run_2to3(self.__updated_files, False)
|
||||
self.run_2to3(self.__updated_files, True)
|
||||
self.run_2to3(self.__doctests_2to3, True)
|
||||
|
||||
# Only compile actual .py files, using our base class' idea of what our
|
||||
# output files are.
|
||||
self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))
|
||||
|
||||
def __getattr__(self, attr):
|
||||
"lazily compute data files"
|
||||
if attr == 'data_files':
|
||||
self.data_files = self._get_data_files()
|
||||
return self.data_files
|
||||
return orig.build_py.__getattr__(self, attr)
|
||||
|
||||
def build_module(self, module, module_file, package):
|
||||
if six.PY2 and isinstance(package, six.string_types):
|
||||
# avoid errors on Python 2 when unicode is passed (#190)
|
||||
package = package.split('.')
|
||||
outfile, copied = orig.build_py.build_module(self, module, module_file,
|
||||
package)
|
||||
if copied:
|
||||
self.__updated_files.append(outfile)
|
||||
return outfile, copied
|
||||
|
||||
def _get_data_files(self):
|
||||
"""Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
|
||||
self.analyze_manifest()
|
||||
return list(map(self._get_pkg_data_files, self.packages or ()))
|
||||
|
||||
def _get_pkg_data_files(self, package):
|
||||
# Locate package source directory
|
||||
src_dir = self.get_package_dir(package)
|
||||
|
||||
# Compute package build directory
|
||||
build_dir = os.path.join(*([self.build_lib] + package.split('.')))
|
||||
|
||||
# Strip directory from globbed filenames
|
||||
filenames = [
|
||||
os.path.relpath(file, src_dir)
|
||||
for file in self.find_data_files(package, src_dir)
|
||||
]
|
||||
return package, src_dir, build_dir, filenames
|
||||
|
||||
def find_data_files(self, package, src_dir):
|
||||
"""Return filenames for package's data files in 'src_dir'"""
|
||||
patterns = self._get_platform_patterns(
|
||||
self.package_data,
|
||||
package,
|
||||
src_dir,
|
||||
)
|
||||
globs_expanded = map(glob, patterns)
|
||||
# flatten the expanded globs into an iterable of matches
|
||||
globs_matches = itertools.chain.from_iterable(globs_expanded)
|
||||
glob_files = filter(os.path.isfile, globs_matches)
|
||||
files = itertools.chain(
|
||||
self.manifest_files.get(package, []),
|
||||
glob_files,
|
||||
)
|
||||
return self.exclude_data_files(package, src_dir, files)

    def build_package_data(self):
        """Copy data files into build directory"""
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                self.mkpath(os.path.dirname(target))
                srcfile = os.path.join(src_dir, filename)
                outf, copied = self.copy_file(srcfile, target)
                srcfile = os.path.abspath(srcfile)
                if (copied and
                        srcfile in self.distribution.convert_2to3_doctests):
                    self.__doctests_2to3.append(outf)

    def analyze_manifest(self):
        self.manifest_files = mf = {}
        if not self.distribution.include_package_data:
            return
        src_dirs = {}
        for package in self.packages or ():
            # Locate package source directory
            src_dirs[assert_relative(self.get_package_dir(package))] = package

        self.run_command('egg_info')
        ei_cmd = self.get_finalized_command('egg_info')
        for path in ei_cmd.filelist.files:
            d, f = os.path.split(assert_relative(path))
            prev = None
            oldf = f
            while d and d != prev and d not in src_dirs:
                prev = d
                d, df = os.path.split(d)
                f = os.path.join(df, f)
            if d in src_dirs:
                if path.endswith('.py') and f == oldf:
                    continue  # it's a module, not data
                mf.setdefault(src_dirs[d], []).append(path)

    def get_data_files(self):
        pass  # Lazily compute data files in _get_data_files() function.

    def check_package(self, package, package_dir):
        """Check namespace packages' __init__ for declare_namespace"""
        try:
            return self.packages_checked[package]
        except KeyError:
            pass

        init_py = orig.build_py.check_package(self, package, package_dir)
        self.packages_checked[package] = init_py

        if not init_py or not self.distribution.namespace_packages:
            return init_py

        for pkg in self.distribution.namespace_packages:
            if pkg == package or pkg.startswith(package + '.'):
                break
        else:
            return init_py

        with io.open(init_py, 'rb') as f:
            contents = f.read()
        if b'declare_namespace' not in contents:
            raise distutils.errors.DistutilsError(
                "Namespace package problem: %s is a namespace package, but "
                "its\n__init__.py does not call declare_namespace()! Please "
                'fix it.\n(See the setuptools manual under '
                '"Namespace Packages" for details.)\n' % (package,)
            )
        return init_py

    def initialize_options(self):
        self.packages_checked = {}
        orig.build_py.initialize_options(self)

    def get_package_dir(self, package):
        res = orig.build_py.get_package_dir(self, package)
        if self.distribution.src_root is not None:
            return os.path.join(self.distribution.src_root, res)
        return res

    def exclude_data_files(self, package, src_dir, files):
        """Filter filenames for package's data files in 'src_dir'"""
        files = list(files)
        patterns = self._get_platform_patterns(
            self.exclude_package_data,
            package,
            src_dir,
        )
        match_groups = (
            fnmatch.filter(files, pattern)
            for pattern in patterns
        )
        # flatten the groups of matches into an iterable of matches
        matches = itertools.chain.from_iterable(match_groups)
        bad = set(matches)
        keepers = (
            fn
            for fn in files
            if fn not in bad
        )
        # ditch dupes
        return list(_unique_everseen(keepers))

    @staticmethod
    def _get_platform_patterns(spec, package, src_dir):
        """
        yield platform-specific path patterns (suitable for glob
        or fn_match) from a glob-based spec (such as
        self.package_data or self.exclude_package_data)
        matching package in src_dir.
        """
        raw_patterns = itertools.chain(
            spec.get('', []),
            spec.get(package, []),
        )
        return (
            # Each pattern has to be converted to a platform-specific path
            os.path.join(src_dir, convert_path(pattern))
            for pattern in raw_patterns
        )
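    # Illustrative sketch (not part of the original source): for a
    # hypothetical spec {'': ['*.cfg'], 'pkg': ['templates/*.html']},
    # _get_platform_patterns(spec, 'pkg', 'src/pkg') yields
    # 'src/pkg/*.cfg' and 'src/pkg/templates/*.html', with separators
    # adjusted for the current platform by convert_path().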


# from Python docs
def _unique_everseen(iterable, key=None):
    "List unique elements, preserving order. Remember all elements ever seen."
    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
    # unique_everseen('ABBCcAD', str.lower) --> A B C D
    seen = set()
    seen_add = seen.add
    if key is None:
        for element in filterfalse(seen.__contains__, iterable):
            seen_add(element)
            yield element
    else:
        for element in iterable:
            k = key(element)
            if k not in seen:
                seen_add(k)
                yield element


def assert_relative(path):
    if not os.path.isabs(path):
        return path
    from distutils.errors import DistutilsSetupError

    msg = textwrap.dedent("""
        Error: setup script specifies an absolute path:

            %s

        setup() arguments must *always* be /-separated paths relative to the
        setup.py directory, *never* absolute paths.
        """).lstrip() % path
    raise DistutilsSetupError(msg)


214
flask/lib/python3.4/site-packages/setuptools/command/develop.py
Normal file
@@ -0,0 +1,214 @@
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsError, DistutilsOptionError
import os
import glob
import io

import six

from pkg_resources import Distribution, PathMetadata, normalize_path
from setuptools.command.easy_install import easy_install
from setuptools import namespaces
import setuptools


class develop(namespaces.DevelopInstaller, easy_install):
    """Set up package for development"""

    description = "install package in 'development mode'"

    user_options = easy_install.user_options + [
        ("uninstall", "u", "Uninstall this source package"),
        ("egg-path=", None, "Set the path to be used in the .egg-link file"),
    ]

    boolean_options = easy_install.boolean_options + ['uninstall']

    command_consumes_arguments = False  # override base

    def run(self):
        if self.uninstall:
            self.multi_version = True
            self.uninstall_link()
            self.uninstall_namespaces()
        else:
            self.install_for_development()
        self.warn_deprecated_options()

    def initialize_options(self):
        self.uninstall = None
        self.egg_path = None
        easy_install.initialize_options(self)
        self.setup_path = None
        self.always_copy_from = '.'  # always copy eggs installed in curdir

    def finalize_options(self):
        ei = self.get_finalized_command("egg_info")
        if ei.broken_egg_info:
            template = "Please rename %r to %r before using 'develop'"
            args = ei.egg_info, ei.broken_egg_info
            raise DistutilsError(template % args)
        self.args = [ei.egg_name]

        easy_install.finalize_options(self)
        self.expand_basedirs()
        self.expand_dirs()
        # pick up setup-dir .egg files only: no .egg-info
        self.package_index.scan(glob.glob('*.egg'))

        egg_link_fn = ei.egg_name + '.egg-link'
        self.egg_link = os.path.join(self.install_dir, egg_link_fn)
        self.egg_base = ei.egg_base
        if self.egg_path is None:
            self.egg_path = os.path.abspath(ei.egg_base)

        target = normalize_path(self.egg_base)
        egg_path = normalize_path(os.path.join(self.install_dir,
                                               self.egg_path))
        if egg_path != target:
            raise DistutilsOptionError(
                "--egg-path must be a relative path from the install"
                " directory to " + target
            )

        # Make a distribution for the package's source
        self.dist = Distribution(
            target,
            PathMetadata(target, os.path.abspath(ei.egg_info)),
            project_name=ei.egg_name
        )

        self.setup_path = self._resolve_setup_path(
            self.egg_base,
            self.install_dir,
            self.egg_path,
        )

    @staticmethod
    def _resolve_setup_path(egg_base, install_dir, egg_path):
        """
        Generate a path from egg_base back to '.' where the
        setup script resides and ensure that path points to the
        setup path from $install_dir/$egg_path.
        """
        path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
        if path_to_setup != os.curdir:
            path_to_setup = '../' * (path_to_setup.count('/') + 1)
        resolved = normalize_path(
            os.path.join(install_dir, egg_path, path_to_setup))
        if resolved != normalize_path(os.curdir):
            raise DistutilsOptionError(
                "Can't get a consistent path to setup script from"
                " installation directory", resolved, normalize_path(os.curdir))
        return path_to_setup
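    # Illustrative sketch (not part of the original source): with, say,
    # egg_base='lib/py', path_to_setup becomes '../../' (one '../' per path
    # component), and the check confirms that $install_dir/$egg_path/../../
    # normalizes back to the current directory.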

    def install_for_development(self):
        if six.PY3 and getattr(self.distribution, 'use_2to3', False):
            # If we run 2to3 we cannot do this in place:

            # Ensure metadata is up-to-date
            self.reinitialize_command('build_py', inplace=0)
            self.run_command('build_py')
            bpy_cmd = self.get_finalized_command("build_py")
            build_path = normalize_path(bpy_cmd.build_lib)

            # Build extensions
            self.reinitialize_command('egg_info', egg_base=build_path)
            self.run_command('egg_info')

            self.reinitialize_command('build_ext', inplace=0)
            self.run_command('build_ext')

            # Fixup egg-link and easy-install.pth
            ei_cmd = self.get_finalized_command("egg_info")
            self.egg_path = build_path
            self.dist.location = build_path
            # XXX
            self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)
        else:
            # Without 2to3 inplace works fine:
            self.run_command('egg_info')

            # Build extensions in-place
            self.reinitialize_command('build_ext', inplace=1)
            self.run_command('build_ext')

        self.install_site_py()  # ensure that target dir is site-safe
        if setuptools.bootstrap_install_from:
            self.easy_install(setuptools.bootstrap_install_from)
            setuptools.bootstrap_install_from = None

        self.install_namespaces()

        # create an .egg-link in the installation dir, pointing to our egg
        log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
        if not self.dry_run:
            with open(self.egg_link, "w") as f:
                f.write(self.egg_path + "\n" + self.setup_path)
        # postprocess the installed distro, fixing up .pth, installing scripts,
        # and handling requirements
        self.process_distribution(None, self.dist, not self.no_deps)
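        # Illustrative sketch (not part of the original source): the
        # .egg-link written above holds two lines -- the absolute egg path
        # and the relative path back to the setup directory -- e.g.:
        #
        #   /home/user/project
        #   .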

    def uninstall_link(self):
        if os.path.exists(self.egg_link):
            log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
            egg_link_file = open(self.egg_link)
            contents = [line.rstrip() for line in egg_link_file]
            egg_link_file.close()
            if contents not in ([self.egg_path],
                                [self.egg_path, self.setup_path]):
                log.warn("Link points to %s: uninstall aborted", contents)
                return
            if not self.dry_run:
                os.unlink(self.egg_link)
        if not self.dry_run:
            self.update_pth(self.dist)  # remove any .pth link to us
        if self.distribution.scripts:
            # XXX should also check for entry point scripts!
            log.warn("Note: you must uninstall or replace scripts manually!")

    def install_egg_scripts(self, dist):
        if dist is not self.dist:
            # Installing a dependency, so fall back to normal behavior
            return easy_install.install_egg_scripts(self, dist)

        # create wrapper scripts in the script dir, pointing to dist.scripts

        # new-style...
        self.install_wrapper_scripts(dist)

        # ...and old-style
        for script_name in self.distribution.scripts or []:
            script_path = os.path.abspath(convert_path(script_name))
            script_name = os.path.basename(script_path)
            with io.open(script_path) as strm:
                script_text = strm.read()
            self.install_script(dist, script_name, script_text, script_path)

    def install_wrapper_scripts(self, dist):
        dist = VersionlessRequirement(dist)
        return easy_install.install_wrapper_scripts(self, dist)


class VersionlessRequirement(object):
    """
    Adapt a pkg_resources.Distribution to simply return the project
    name as the 'requirement' so that scripts will work across
    multiple versions.

    >>> dist = Distribution(project_name='foo', version='1.0')
    >>> str(dist.as_requirement())
    'foo==1.0'
    >>> adapted_dist = VersionlessRequirement(dist)
    >>> str(adapted_dist.as_requirement())
    'foo'
    """

    def __init__(self, dist):
        self.__dist = dist

    def __getattr__(self, name):
        return getattr(self.__dist, name)

    def as_requirement(self):
        return self.project_name


2292
flask/lib/python3.4/site-packages/setuptools/command/easy_install.py
Normal file
File diff suppressed because it is too large

696
flask/lib/python3.4/site-packages/setuptools/command/egg_info.py
Normal file
@@ -0,0 +1,696 @@
"""setuptools.command.egg_info

Create a distribution's .egg-info directory and contents"""

from distutils.filelist import FileList as _FileList
from distutils.errors import DistutilsInternalError
from distutils.util import convert_path
from distutils import log
import distutils.errors
import distutils.filelist
import os
import re
import sys
import io
import warnings
import time
import collections

import six
from six.moves import map

from setuptools import Command
from setuptools.command.sdist import sdist
from setuptools.command.sdist import walk_revctrl
from setuptools.command.setopt import edit_config
from setuptools.command import bdist_egg
from pkg_resources import (
    parse_requirements, safe_name, parse_version,
    safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
import setuptools.unicode_utils as unicode_utils
from setuptools.glob import glob

import packaging


def translate_pattern(glob):
    """
    Translate a file path glob like '*.txt' into a regular expression.
    This differs from fnmatch.translate, which allows wildcards to match
    directory separators. It also knows about '**/' which matches any
    number of directories.
    """
    pat = ''

    # This will split on '/' within [character classes]. This is deliberate.
    chunks = glob.split(os.path.sep)

    sep = re.escape(os.sep)
    valid_char = '[^%s]' % (sep,)

    for c, chunk in enumerate(chunks):
        last_chunk = c == len(chunks) - 1

        # Chunks that are a literal ** are globstars. They match anything.
        if chunk == '**':
            if last_chunk:
                # Match anything if this is the last component
                pat += '.*'
            else:
                # Match '(name/)*'
                pat += '(?:%s+%s)*' % (valid_char, sep)
            continue  # Break here as the whole path component has been handled

        # Find any special characters in the remainder
        i = 0
        chunk_len = len(chunk)
        while i < chunk_len:
            char = chunk[i]
            if char == '*':
                # Match any number of name characters
                pat += valid_char + '*'
            elif char == '?':
                # Match a name character
                pat += valid_char
            elif char == '[':
                # Character class
                inner_i = i + 1
                # Skip initial !/] chars
                if inner_i < chunk_len and chunk[inner_i] == '!':
                    inner_i = inner_i + 1
                if inner_i < chunk_len and chunk[inner_i] == ']':
                    inner_i = inner_i + 1

                # Loop till the closing ] is found
                while inner_i < chunk_len and chunk[inner_i] != ']':
                    inner_i = inner_i + 1

                if inner_i >= chunk_len:
                    # Got to the end of the string without finding a closing ]
                    # Do not treat this as a matching group, but as a literal [
                    pat += re.escape(char)
                else:
                    # Grab the insides of the [brackets]
                    inner = chunk[i + 1:inner_i]
                    char_class = ''

                    # Class negation
                    if inner[0] == '!':
                        char_class = '^'
                        inner = inner[1:]

                    char_class += re.escape(inner)
                    pat += '[%s]' % (char_class,)

                    # Skip to the end ]
                    i = inner_i
            else:
                pat += re.escape(char)
            i += 1

        # Join each chunk with the dir separator
        if not last_chunk:
            pat += sep

    return re.compile(pat + r'\Z(?ms)')
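
# Illustrative sketch (not part of the original source): unlike
# fnmatch.translate, the pattern produced here never lets '*' or '?' cross a
# directory separator, while a non-final '**' chunk spans any number of
# directories. For example, on a '/'-separated platform:
#
#   pat = translate_pattern('docs/**/*.txt')
#   pat.match('docs/intro.txt')          # matches
#   pat.match('docs/api/deep/x.txt')     # matches ('**/' spans directories)
#   pat.match('src/docs/intro.txt')      # None -- anchored at the start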


class egg_info(Command):
    description = "create a distribution's .egg-info directory"

    user_options = [
        ('egg-base=', 'e', "directory containing .egg-info directories"
                           " (default: top of the source tree)"),
        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
        ('no-date', 'D', "Don't include date stamp [default]"),
    ]

    boolean_options = ['tag-date']
    negative_opt = {
        'no-date': 'tag-date',
    }

    def initialize_options(self):
        self.egg_name = None
        self.egg_version = None
        self.egg_base = None
        self.egg_info = None
        self.tag_build = None
        self.tag_date = 0
        self.broken_egg_info = False
        self.vtags = None

    ####################################
    # allow the 'tag_svn_revision' to be detected and
    # set, supporting sdists built on older Setuptools.
    @property
    def tag_svn_revision(self):
        pass

    @tag_svn_revision.setter
    def tag_svn_revision(self, value):
        pass
    ####################################

    def save_version_info(self, filename):
        """
        Materialize the value of date into the
        build tag. Install build keys in a deterministic order
        to avoid arbitrary reordering on subsequent builds.
        """
        # python 2.6 compatibility
        odict = getattr(collections, 'OrderedDict', dict)
        egg_info = odict()
        # follow the order these keys would have been added
        # when PYTHONHASHSEED=0
        egg_info['tag_build'] = self.tags()
        egg_info['tag_date'] = 0
        edit_config(filename, dict(egg_info=egg_info))

    def finalize_options(self):
        self.egg_name = safe_name(self.distribution.get_name())
        self.vtags = self.tags()
        self.egg_version = self.tagged_version()

        parsed_version = parse_version(self.egg_version)

        try:
            is_version = isinstance(parsed_version, packaging.version.Version)
            spec = (
                "%s==%s" if is_version else "%s===%s"
            )
            list(
                parse_requirements(spec % (self.egg_name, self.egg_version))
            )
        except ValueError:
            raise distutils.errors.DistutilsOptionError(
                "Invalid distribution name or version syntax: %s-%s" %
                (self.egg_name, self.egg_version)
            )

        if self.egg_base is None:
            dirs = self.distribution.package_dir
            self.egg_base = (dirs or {}).get('', os.curdir)

        self.ensure_dirname('egg_base')
        self.egg_info = to_filename(self.egg_name) + '.egg-info'
        if self.egg_base != os.curdir:
            self.egg_info = os.path.join(self.egg_base, self.egg_info)
        if '-' in self.egg_name:
            self.check_broken_egg_info()

        # Set package version for the benefit of dumber commands
        # (e.g. sdist, bdist_wininst, etc.)
        #
        self.distribution.metadata.version = self.egg_version

        # If we bootstrapped around the lack of a PKG-INFO, as might be the
        # case in a fresh checkout, make sure that any special tags get added
        # to the version info
        #
        pd = self.distribution._patched_dist
        if pd is not None and pd.key == self.egg_name.lower():
            pd._version = self.egg_version
            pd._parsed_version = parse_version(self.egg_version)
            self.distribution._patched_dist = None

    def write_or_delete_file(self, what, filename, data, force=False):
        """Write `data` to `filename` or delete if empty

        If `data` is non-empty, this routine is the same as ``write_file()``.
        If `data` is empty but not ``None``, this is the same as calling
        ``delete_file(filename)``. If `data` is ``None``, then this is a no-op
        unless `filename` exists, in which case a warning is issued about the
        orphaned file (if `force` is false), or deleted (if `force` is true).
        """
        if data:
            self.write_file(what, filename, data)
        elif os.path.exists(filename):
            if data is None and not force:
                log.warn(
                    "%s not set in setup(), but %s exists", what, filename
                )
                return
            else:
                self.delete_file(filename)

    def write_file(self, what, filename, data):
        """Write `data` to `filename` (if not a dry run) after announcing it

        `what` is used in a log message to identify what is being written
        to the file.
        """
        log.info("writing %s to %s", what, filename)
        if six.PY3:
            data = data.encode("utf-8")
        if not self.dry_run:
            f = open(filename, 'wb')
            f.write(data)
            f.close()

    def delete_file(self, filename):
        """Delete `filename` (if not a dry run) after announcing it"""
        log.info("deleting %s", filename)
        if not self.dry_run:
            os.unlink(filename)

    def tagged_version(self):
        version = self.distribution.get_version()
        # egg_info may be called more than once for a distribution,
        # in which case the version string already contains all tags.
        if self.vtags and version.endswith(self.vtags):
            return safe_version(version)
        return safe_version(version + self.vtags)

    def run(self):
        self.mkpath(self.egg_info)
        installer = self.distribution.fetch_build_egg
        for ep in iter_entry_points('egg_info.writers'):
            ep.require(installer=installer)
            writer = ep.resolve()
            writer(self, ep.name, os.path.join(self.egg_info, ep.name))

        # Get rid of native_libs.txt if it was put there by older bdist_egg
        nl = os.path.join(self.egg_info, "native_libs.txt")
        if os.path.exists(nl):
            self.delete_file(nl)

        self.find_sources()

    def tags(self):
        version = ''
        if self.tag_build:
            version += self.tag_build
        if self.tag_date:
            version += time.strftime("-%Y%m%d")
        return version
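    # Illustrative sketch (not part of the original source): with, say,
    # tag_build='.dev' and tag_date=1, tags() returns something like
    # '.dev-20050528', so a setup() version of '1.0' becomes the egg
    # version safe_version('1.0' + '.dev-20050528') in tagged_version().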

    def find_sources(self):
        """Generate SOURCES.txt manifest file"""
        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
        mm = manifest_maker(self.distribution)
        mm.manifest = manifest_filename
        mm.run()
        self.filelist = mm.filelist

    def check_broken_egg_info(self):
        bei = self.egg_name + '.egg-info'
        if self.egg_base != os.curdir:
            bei = os.path.join(self.egg_base, bei)
        if os.path.exists(bei):
            log.warn(
                "-" * 78 + '\n'
                "Note: Your current .egg-info directory has a '-' in its name;"
                '\nthis will not work correctly with "setup.py develop".\n\n'
                'Please rename %s to %s to correct this problem.\n' + '-' * 78,
                bei, self.egg_info
            )
            self.broken_egg_info = self.egg_info
            self.egg_info = bei  # make it work for now


class FileList(_FileList):
    # Implementations of the various MANIFEST.in commands

    def process_template_line(self, line):
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words. 'action' is always
        # defined: it's the first word of the line. Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            self.debug_print("include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include(pattern):
                    log.warn("warning: no files found matching '%s'", pattern)

        elif action == 'exclude':
            self.debug_print("exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude(pattern):
                    log.warn(("warning: no previously-included files "
                              "found matching '%s'"), pattern)

        elif action == 'global-include':
            self.debug_print("global-include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.global_include(pattern):
                    log.warn(("warning: no files found matching '%s' "
                              "anywhere in distribution"), pattern)

        elif action == 'global-exclude':
            self.debug_print("global-exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.global_exclude(pattern):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found anywhere in distribution"),
                             pattern)

        elif action == 'recursive-include':
            self.debug_print("recursive-include %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.recursive_include(dir, pattern):
                    log.warn(("warning: no files found matching '%s' "
                              "under directory '%s'"),
                             pattern, dir)

        elif action == 'recursive-exclude':
            self.debug_print("recursive-exclude %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.recursive_exclude(dir, pattern):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found under directory '%s'"),
                             pattern, dir)

        elif action == 'graft':
            self.debug_print("graft " + dir_pattern)
            if not self.graft(dir_pattern):
                log.warn("warning: no directories found matching '%s'",
                         dir_pattern)

        elif action == 'prune':
            self.debug_print("prune " + dir_pattern)
            if not self.prune(dir_pattern):
                log.warn(("no previously-included directories found "
                          "matching '%s'"), dir_pattern)

        else:
            raise DistutilsInternalError(
                "this cannot happen: invalid action '%s'" % action)
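    # Illustrative sketch (not part of the original source): the template
    # lines handled above are ordinary MANIFEST.in directives, e.g.:
    #
    #   include README.rst
    #   recursive-include docs *.txt
    #   graft examples
    #   prune build
    #   global-exclude *.pyc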

    def _remove_files(self, predicate):
        """
        Remove all files from the file list that match the predicate.
        Return True if any matching files were removed.
        """
        found = False
        for i in range(len(self.files) - 1, -1, -1):
            if predicate(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                found = True
        return found

    def include(self, pattern):
        """Include files that match 'pattern'."""
        found = [f for f in glob(pattern) if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def exclude(self, pattern):
        """Exclude files that match 'pattern'."""
        match = translate_pattern(pattern)
        return self._remove_files(match.match)

    def recursive_include(self, dir, pattern):
        """
        Include all files anywhere in 'dir/' that match the pattern.
        """
        full_pattern = os.path.join(dir, '**', pattern)
        found = [f for f in glob(full_pattern, recursive=True)
                 if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def recursive_exclude(self, dir, pattern):
        """
        Exclude any file anywhere in 'dir/' that matches the pattern.
        """
        match = translate_pattern(os.path.join(dir, '**', pattern))
        return self._remove_files(match.match)

    def graft(self, dir):
        """Include all files from 'dir/'."""
        found = [
            item
            for match_dir in glob(dir)
            for item in distutils.filelist.findall(match_dir)
        ]
        self.extend(found)
        return bool(found)

    def prune(self, dir):
        """Filter out files from 'dir/'."""
        match = translate_pattern(os.path.join(dir, '**'))
        return self._remove_files(match.match)

    def global_include(self, pattern):
        """
        Include all files anywhere in the current directory that match the
        pattern. This is very inefficient on large file trees.
        """
        if self.allfiles is None:
            self.findall()
        match = translate_pattern(os.path.join('**', pattern))
        found = [f for f in self.allfiles if match.match(f)]
        self.extend(found)
        return bool(found)

    def global_exclude(self, pattern):
        """
        Exclude all files anywhere that match the pattern.
        """
        match = translate_pattern(os.path.join('**', pattern))
        return self._remove_files(match.match)

    def append(self, item):
        if item.endswith('\r'):  # Fix older sdists built on Windows
            item = item[:-1]
        path = convert_path(item)

        if self._safe_path(path):
            self.files.append(path)

    def extend(self, paths):
        self.files.extend(filter(self._safe_path, paths))

    def _repair(self):
        """
        Replace self.files with only safe paths

        Because some owners of FileList manipulate the underlying
        ``files`` attribute directly, this method must be called to
        repair those paths.
        """
        self.files = list(filter(self._safe_path, self.files))

    def _safe_path(self, path):
        enc_warn = "'%s' not %s encodable -- skipping"

        # To avoid accidental transcoding errors, decode to unicode first
        u_path = unicode_utils.filesys_decode(path)
        if u_path is None:
            log.warn("'%s' in unexpected encoding -- skipping" % path)
            return False

        # Must ensure utf-8 encodability
        utf8_path = unicode_utils.try_encode(u_path, "utf-8")
        if utf8_path is None:
            log.warn(enc_warn, path, 'utf-8')
            return False

        try:
            # accept if either way checks out
            if os.path.exists(u_path) or os.path.exists(utf8_path):
                return True
        # this will catch any encode errors decoding u_path
        except UnicodeEncodeError:
            log.warn(enc_warn, path, sys.getfilesystemencoding())


class manifest_maker(sdist):
    template = "MANIFEST.in"

    def initialize_options(self):
        self.use_defaults = 1
        self.prune = 1
        self.manifest_only = 1
        self.force_manifest = 1

    def finalize_options(self):
        pass

    def run(self):
        self.filelist = FileList()
        if not os.path.exists(self.manifest):
            self.write_manifest()  # it must exist so it'll get in the list
        self.add_defaults()
        if os.path.exists(self.template):
            self.read_template()
        self.prune_file_list()
        self.filelist.sort()
        self.filelist.remove_duplicates()
        self.write_manifest()

    def _manifest_normalize(self, path):
        path = unicode_utils.filesys_decode(path)
        return path.replace(os.sep, '/')

    def write_manifest(self):
        """
        Write the file list in 'self.filelist' to the manifest file
        named by 'self.manifest'.
        """
        self.filelist._repair()

        # _repair has ensured the entries are utf-8 encodable unicode paths
        files = [self._manifest_normalize(f) for f in self.filelist.files]
        msg = "writing manifest file '%s'" % self.manifest
        self.execute(write_file, (self.manifest, files), msg)

    def warn(self, msg):
        if not self._should_suppress_warning(msg):
            sdist.warn(self, msg)

    @staticmethod
    def _should_suppress_warning(msg):
        """
        suppress missing-file warnings from sdist
        """
        return re.match(r"standard file .*not found", msg)

    def add_defaults(self):
        sdist.add_defaults(self)
        self.filelist.append(self.template)
        self.filelist.append(self.manifest)
        rcfiles = list(walk_revctrl())
        if rcfiles:
            self.filelist.extend(rcfiles)
        elif os.path.exists(self.manifest):
            self.read_manifest()
        ei_cmd = self.get_finalized_command('egg_info')
        self.filelist.graft(ei_cmd.egg_info)

    def prune_file_list(self):
        build = self.get_finalized_command('build')
        base_dir = self.distribution.get_fullname()
        self.filelist.prune(build.build_base)
        self.filelist.prune(base_dir)
        sep = re.escape(os.sep)
        self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
                                      is_regex=1)


def write_file(filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.
    """
    contents = "\n".join(contents)

    # assuming the contents has been vetted for utf-8 encoding
    contents = contents.encode("utf-8")

    with open(filename, "wb") as f:  # always write POSIX-style manifest
        f.write(contents)


def write_pkg_info(cmd, basename, filename):
    log.info("writing %s", filename)
    if not cmd.dry_run:
        metadata = cmd.distribution.metadata
        metadata.version, oldver = cmd.egg_version, metadata.version
        metadata.name, oldname = cmd.egg_name, metadata.name
        try:
            # write unescaped data to PKG-INFO, so older pkg_resources
            # can still parse it
            metadata.write_pkg_info(cmd.egg_info)
        finally:
            metadata.name, metadata.version = oldname, oldver

        safe = getattr(cmd.distribution, 'zip_safe', None)

        bdist_egg.write_safety_flag(cmd.egg_info, safe)


def warn_depends_obsolete(cmd, basename, filename):
    if os.path.exists(filename):
        log.warn(
            "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
            "Use the install_requires/extras_require setup() args instead."
        )


def _write_requirements(stream, reqs):
    lines = yield_lines(reqs or ())
    append_cr = lambda line: line + '\n'
    lines = map(append_cr, lines)
    stream.writelines(lines)


def write_requirements(cmd, basename, filename):
    dist = cmd.distribution
    data = six.StringIO()
    _write_requirements(data, dist.install_requires)
    extras_require = dist.extras_require or {}
    for extra in sorted(extras_require):
        data.write('\n[{extra}]\n'.format(**vars()))
        _write_requirements(data, extras_require[extra])
    cmd.write_or_delete_file("requirements", filename, data.getvalue())


def write_setup_requirements(cmd, basename, filename):
    data = six.StringIO()
    _write_requirements(data, cmd.distribution.setup_requires)
    cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())


def write_toplevel_names(cmd, basename, filename):
    pkgs = dict.fromkeys(
        [
            k.split('.', 1)[0]
            for k in cmd.distribution.iter_distribution_names()
        ]
    )
    cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')


def overwrite_arg(cmd, basename, filename):
    write_arg(cmd, basename, filename, True)


def write_arg(cmd, basename, filename, force=False):
    argname = os.path.splitext(basename)[0]
    value = getattr(cmd.distribution, argname, None)
    if value is not None:
        value = '\n'.join(value) + '\n'
    cmd.write_or_delete_file(argname, filename, value, force)


def write_entries(cmd, basename, filename):
    ep = cmd.distribution.entry_points

    if isinstance(ep, six.string_types) or ep is None:
        data = ep
    else:
        data = []
        for section, contents in sorted(ep.items()):
            if not isinstance(contents, six.string_types):
                contents = EntryPoint.parse_group(section, contents)
                contents = '\n'.join(sorted(map(str, contents.values())))
            data.append('[%s]\n%s\n\n' % (section, contents))
        data = ''.join(data)

    cmd.write_or_delete_file('entry points', filename, data, True)


def get_pkg_info_revision():
    """
    Get a -r### off of PKG-INFO Version in case this is an sdist of
    a subversion revision.
    """
    warnings.warn("get_pkg_info_revision is deprecated.", DeprecationWarning)
    if os.path.exists('PKG-INFO'):
        with io.open('PKG-INFO') as f:
            for line in f:
                match = re.match(r"Version:.*-r(\d+)\s*$", line)
                if match:
                    return int(match.group(1))
    return 0


125
flask/lib/python3.4/site-packages/setuptools/command/install.py
Normal file
@@ -0,0 +1,125 @@
from distutils.errors import DistutilsArgError
import inspect
import glob
import warnings
import platform
import distutils.command.install as orig

import setuptools

# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
# now. See https://github.com/pypa/setuptools/issues/199/
_install = orig.install


class install(orig.install):
    """Use easy_install to install the package, w/dependencies"""

    user_options = orig.install.user_options + [
        ('old-and-unmanageable', None, "Try not to use this!"),
        ('single-version-externally-managed', None,
         "used by system package builders to create 'flat' eggs"),
    ]
    boolean_options = orig.install.boolean_options + [
        'old-and-unmanageable', 'single-version-externally-managed',
    ]
    new_commands = [
        ('install_egg_info', lambda self: True),
        ('install_scripts', lambda self: True),
    ]
    _nc = dict(new_commands)

    def initialize_options(self):
        orig.install.initialize_options(self)
        self.old_and_unmanageable = None
        self.single_version_externally_managed = None

    def finalize_options(self):
        orig.install.finalize_options(self)
        if self.root:
            self.single_version_externally_managed = True
        elif self.single_version_externally_managed:
            if not self.root and not self.record:
                raise DistutilsArgError(
                    "You must specify --record or --root when building system"
                    " packages"
                )

    def handle_extra_path(self):
        if self.root or self.single_version_externally_managed:
            # explicit backward-compatibility mode, allow extra_path to work
            return orig.install.handle_extra_path(self)

        # Ignore extra_path when installing an egg (or being run by another
        # command without --root or --single-version-externally-managed)
        self.path_file = None
        self.extra_dirs = ''

    def run(self):
        # Explicit request for old-style install?  Just do it
        if self.old_and_unmanageable or self.single_version_externally_managed:
            return orig.install.run(self)

        if not self._called_from_setup(inspect.currentframe()):
            # Run in backward-compatibility mode to support bdist_* commands.
            orig.install.run(self)
        else:
            self.do_egg_install()

    @staticmethod
    def _called_from_setup(run_frame):
        """
        Attempt to detect whether run() was called from setup() or by another
        command.  If called by setup(), the parent caller will be the
        'run_command' method in 'distutils.dist', and *its* caller will be
        the 'run_commands' method.  If called any other way, the
        immediate caller *might* be 'run_command', but it won't have been
        called by 'run_commands'.  Return True in that case or if a call stack
        is unavailable.  Return False otherwise.
        """
        if run_frame is None:
            msg = "Call stack not available. bdist_* commands may fail."
            warnings.warn(msg)
            if platform.python_implementation() == 'IronPython':
                msg = "For best results, pass -X:Frames to enable call stack."
                warnings.warn(msg)
            return True
        res = inspect.getouterframes(run_frame)[2]
        caller, = res[:1]
        info = inspect.getframeinfo(caller)
        caller_module = caller.f_globals.get('__name__', '')
        return (
            caller_module == 'distutils.dist'
            and info.function == 'run_commands'
        )

    def do_egg_install(self):

        easy_install = self.distribution.get_command_class('easy_install')

        cmd = easy_install(
            self.distribution, args="x", root=self.root, record=self.record,
        )
        cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
        cmd.always_copy_from = '.'  # make sure local-dir eggs get installed

        # pick up setup-dir .egg files only: no .egg-info
        cmd.package_index.scan(glob.glob('*.egg'))

        self.run_command('bdist_egg')
        args = [self.distribution.get_command_obj('bdist_egg').egg_output]

        if setuptools.bootstrap_install_from:
            # Bootstrap self-installation of setuptools
            args.insert(0, setuptools.bootstrap_install_from)

        cmd.args = args
        cmd.run()
        setuptools.bootstrap_install_from = None


# XXX Python 3.1 doesn't see _nc if this is inside the class
install.sub_commands = (
    [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
    install.new_commands
)


62
flask/lib/python3.4/site-packages/setuptools/command/install_egg_info.py
Normal file
@@ -0,0 +1,62 @@
from distutils import log, dir_util
import os

from setuptools import Command
from setuptools import namespaces
from setuptools.archive_util import unpack_archive
import pkg_resources


class install_egg_info(namespaces.Installer, Command):
    """Install an .egg-info directory for the package"""

    description = "Install an .egg-info directory for the package"

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        self.install_dir = None

    def finalize_options(self):
        self.set_undefined_options('install_lib',
                                   ('install_dir', 'install_dir'))
        ei_cmd = self.get_finalized_command("egg_info")
        basename = pkg_resources.Distribution(
            None, None, ei_cmd.egg_name, ei_cmd.egg_version
        ).egg_name() + '.egg-info'
        self.source = ei_cmd.egg_info
        self.target = os.path.join(self.install_dir, basename)
        self.outputs = []

    def run(self):
        self.run_command('egg_info')
        if os.path.isdir(self.target) and not os.path.islink(self.target):
            dir_util.remove_tree(self.target, dry_run=self.dry_run)
        elif os.path.exists(self.target):
            self.execute(os.unlink, (self.target,), "Removing " + self.target)
        if not self.dry_run:
            pkg_resources.ensure_directory(self.target)
        self.execute(
            self.copytree, (), "Copying %s to %s" % (self.source, self.target)
        )
        self.install_namespaces()

    def get_outputs(self):
        return self.outputs

    def copytree(self):
        # Copy the .egg-info tree to site-packages
        def skimmer(src, dst):
            # filter out source-control directories; note that 'src' is always
            # a '/'-separated path, regardless of platform.  'dst' is a
            # platform-specific path.
            for skip in '.svn/', 'CVS/':
                if src.startswith(skip) or '/' + skip in src:
                    return None
            self.outputs.append(dst)
            log.debug("Copying %s to %s", src, dst)
            return dst

        unpack_archive(self.source, self.target, skimmer)


121
flask/lib/python3.4/site-packages/setuptools/command/install_lib.py
Normal file
@@ -0,0 +1,121 @@
import os
import imp
from itertools import product, starmap
import distutils.command.install_lib as orig


class install_lib(orig.install_lib):
    """Don't add compiled flags to filenames of non-Python files"""

    def run(self):
        self.build()
        outfiles = self.install()
        if outfiles is not None:
            # always compile, in case we have any extension stubs to deal with
            self.byte_compile(outfiles)

    def get_exclusions(self):
        """
        Return a collections.Sized collections.Container of paths to be
        excluded for single_version_externally_managed installations.
        """
        all_packages = (
            pkg
            for ns_pkg in self._get_SVEM_NSPs()
            for pkg in self._all_packages(ns_pkg)
        )

        excl_specs = product(all_packages, self._gen_exclusion_paths())
        return set(starmap(self._exclude_pkg_path, excl_specs))

    def _exclude_pkg_path(self, pkg, exclusion_path):
        """
        Given a package name and exclusion path within that package,
        compute the full exclusion path.
        """
        parts = pkg.split('.') + [exclusion_path]
        return os.path.join(self.install_dir, *parts)

    @staticmethod
    def _all_packages(pkg_name):
        """
        >>> list(install_lib._all_packages('foo.bar.baz'))
        ['foo.bar.baz', 'foo.bar', 'foo']
        """
        while pkg_name:
            yield pkg_name
            pkg_name, sep, child = pkg_name.rpartition('.')

    def _get_SVEM_NSPs(self):
        """
        Get namespace packages (list) but only for
        single_version_externally_managed installations and empty otherwise.
        """
        # TODO: is it necessary to short-circuit here? i.e. what's the cost
        # if get_finalized_command is called even when namespace_packages is
        # False?
        if not self.distribution.namespace_packages:
            return []

        install_cmd = self.get_finalized_command('install')
        svem = install_cmd.single_version_externally_managed

        return self.distribution.namespace_packages if svem else []

    @staticmethod
    def _gen_exclusion_paths():
        """
        Generate file paths to be excluded for namespace packages (bytecode
        cache files).
        """
        # always exclude the package module itself
        yield '__init__.py'

        yield '__init__.pyc'
        yield '__init__.pyo'

        if not hasattr(imp, 'get_tag'):
            return

        base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
        yield base + '.pyc'
        yield base + '.pyo'
        yield base + '.opt-1.pyc'
        yield base + '.opt-2.pyc'
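        # Illustrative sketch (not part of the original source): on CPython
        # 3.4, where imp.get_tag() returns 'cpython-34', this generator
        # yields '__init__.py', '__init__.pyc', '__init__.pyo', then
        # '__pycache__/__init__.cpython-34.pyc' and its .pyo and .opt-*
        # siblings.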

    def copy_tree(
            self, infile, outfile,
            preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
    ):
        assert preserve_mode and preserve_times and not preserve_symlinks
        exclude = self.get_exclusions()

        if not exclude:
            return orig.install_lib.copy_tree(self, infile, outfile)

        # Exclude namespace package __init__.py* files from the output

        from setuptools.archive_util import unpack_directory
        from distutils import log

        outfiles = []

        def pf(src, dst):
            if dst in exclude:
                log.warn("Skipping installation of %s (namespace package)",
                         dst)
                return False

            log.info("copying %s -> %s", src, os.path.dirname(dst))
            outfiles.append(dst)
            return dst

        unpack_directory(infile, outfile, pf)
        return outfiles

    def get_outputs(self):
        outputs = orig.install_lib.get_outputs(self)
        exclude = self.get_exclusions()
        if exclude:
            return [f for f in outputs if f not in exclude]
        return outputs


65
flask/lib/python3.4/site-packages/setuptools/command/install_scripts.py
Normal file
@@ -0,0 +1,65 @@
from distutils import log
import distutils.command.install_scripts as orig
import os
import sys

from pkg_resources import Distribution, PathMetadata, ensure_directory


class install_scripts(orig.install_scripts):
    """Do normal script install, plus any egg_info wrapper scripts"""

    def initialize_options(self):
        orig.install_scripts.initialize_options(self)
        self.no_ep = False

    def run(self):
        import setuptools.command.easy_install as ei

        self.run_command("egg_info")
        if self.distribution.scripts:
            orig.install_scripts.run(self)  # run first to set up self.outfiles
        else:
            self.outfiles = []
        if self.no_ep:
            # don't install entry point scripts into .egg file!
            return

        ei_cmd = self.get_finalized_command("egg_info")
        dist = Distribution(
            ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
            ei_cmd.egg_name, ei_cmd.egg_version,
        )
        bs_cmd = self.get_finalized_command('build_scripts')
        exec_param = getattr(bs_cmd, 'executable', None)
        bw_cmd = self.get_finalized_command("bdist_wininst")
        is_wininst = getattr(bw_cmd, '_is_running', False)
        writer = ei.ScriptWriter
        if is_wininst:
            exec_param = "python.exe"
            writer = ei.WindowsScriptWriter
        if exec_param == sys.executable:
            # In case the path to the Python executable contains a space, wrap
            # it so it's not split up.
            exec_param = [exec_param]
        # resolve the writer to the environment
        writer = writer.best()
        cmd = writer.command_spec_class.best().from_param(exec_param)
        for args in writer.get_args(dist, cmd.as_header()):
            self.write_script(*args)

    def write_script(self, script_name, contents, mode="t", *ignored):
        """Write an executable file to the scripts directory"""
        from setuptools.command.easy_install import chmod, current_umask

        log.info("Installing %s script to %s", script_name, self.install_dir)
        target = os.path.join(self.install_dir, script_name)
        self.outfiles.append(target)

        mask = current_umask()
        if not self.dry_run:
            ensure_directory(target)
            f = open(target, "w" + mode)
            f.write(contents)
            f.close()
            chmod(target, 0o777 - mask)


15
flask/lib/python3.4/site-packages/setuptools/command/launcher manifest.xml
Normal file
@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
    <assemblyIdentity version="1.0.0.0"
                      processorArchitecture="X86"
                      name="%(name)s"
                      type="win32"/>
    <!-- Identify the application security requirements. -->
    <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
        <security>
            <requestedPrivileges>
                <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
            </requestedPrivileges>
        </security>
    </trustInfo>
</assembly>


136
flask/lib/python3.4/site-packages/setuptools/command/py36compat.py
Normal file
@@ -0,0 +1,136 @@
import os
from glob import glob
from distutils.util import convert_path
from distutils.command import sdist

from six.moves import filter


class sdist_add_defaults:
    """
    Mix-in providing forward-compatibility for functionality as found in
    distutils on Python 3.7.

    Do not edit the code in this class except to update functionality
    as implemented in distutils. Instead, override in the subclass.
    """

    def add_defaults(self):
        """Add all the default files to self.filelist:
          - README or README.txt
          - setup.py
          - test/test*.py
          - all pure Python modules mentioned in setup script
          - all files pointed by package_data (build_py)
          - all files defined in data_files.
          - all files defined as scripts.
          - all C sources listed as part of extensions or C libraries
            in the setup script (doesn't catch C headers!)
        Warns if (README or README.txt) or setup.py are missing; everything
        else is optional.
        """
        self._add_defaults_standards()
        self._add_defaults_optional()
        self._add_defaults_python()
        self._add_defaults_data_files()
        self._add_defaults_ext()
        self._add_defaults_c_libs()
        self._add_defaults_scripts()

    @staticmethod
    def _cs_path_exists(fspath):
        """
        Case-sensitive path existence check

        >>> sdist_add_defaults._cs_path_exists(__file__)
        True
        >>> sdist_add_defaults._cs_path_exists(__file__.upper())
        False
        """
        if not os.path.exists(fspath):
            return False
        # make absolute so we always have a directory
        abspath = os.path.abspath(fspath)
        directory, filename = os.path.split(abspath)
        return filename in os.listdir(directory)

    def _add_defaults_standards(self):
        standards = [self.READMES, self.distribution.script_name]
        for fn in standards:
            if isinstance(fn, tuple):
                alts = fn
                got_it = False
                for fn in alts:
                    if self._cs_path_exists(fn):
                        got_it = True
                        self.filelist.append(fn)
                        break

                if not got_it:
                    self.warn("standard file not found: should have one of " +
                              ', '.join(alts))
            else:
                if self._cs_path_exists(fn):
                    self.filelist.append(fn)
                else:
                    self.warn("standard file '%s' not found" % fn)

    def _add_defaults_optional(self):
        optional = ['test/test*.py', 'setup.cfg']
        for pattern in optional:
            files = filter(os.path.isfile, glob(pattern))
            self.filelist.extend(files)

    def _add_defaults_python(self):
        # build_py is used to get:
        #  - python modules
        #  - files defined in package_data
        build_py = self.get_finalized_command('build_py')

        # getting python files
        if self.distribution.has_pure_modules():
            self.filelist.extend(build_py.get_source_files())

        # getting package_data files
        # (computed in build_py.data_files by build_py.finalize_options)
        for pkg, src_dir, build_dir, filenames in build_py.data_files:
            for filename in filenames:
                self.filelist.append(os.path.join(src_dir, filename))

    def _add_defaults_data_files(self):
        # getting distribution.data_files
        if self.distribution.has_data_files():
            for item in self.distribution.data_files:
                if isinstance(item, str):
                    # plain file
                    item = convert_path(item)
                    if os.path.isfile(item):
                        self.filelist.append(item)
                else:
                    # a (dirname, filenames) tuple
                    dirname, filenames = item
                    for f in filenames:
                        f = convert_path(f)
                        if os.path.isfile(f):
                            self.filelist.append(f)

    def _add_defaults_ext(self):
        if self.distribution.has_ext_modules():
            build_ext = self.get_finalized_command('build_ext')
            self.filelist.extend(build_ext.get_source_files())

    def _add_defaults_c_libs(self):
        if self.distribution.has_c_libraries():
            build_clib = self.get_finalized_command('build_clib')
            self.filelist.extend(build_clib.get_source_files())

    def _add_defaults_scripts(self):
        if self.distribution.has_scripts():
            build_scripts = self.get_finalized_command('build_scripts')
            self.filelist.extend(build_scripts.get_source_files())


if hasattr(sdist.sdist, '_add_defaults_standards'):
    # disable the functionality already available upstream
    class sdist_add_defaults:
        pass


10
flask/lib/python3.4/site-packages/setuptools/command/register.py
Normal file
@@ -0,0 +1,10 @@
import distutils.command.register as orig


class register(orig.register):
    __doc__ = orig.register.__doc__

    def run(self):
        # Make sure that we are using valid current name/version info
        self.run_command('egg_info')
        orig.register.run(self)
|
||||
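# Hedged usage sketch: the command above is normally reached via
# `python setup.py register`; programmatically, the same egg_info-then-
# register chain can be driven through a Distribution. The project
# metadata here is hypothetical, and the call is left commented out
# because it would contact the package index.
from setuptools import setup

# setup(name='example-pkg', version='1.0', script_args=['register'])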
@@ -0,0 +1,66 @@
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsOptionError
import os
import shutil

import six

from setuptools import Command


class rotate(Command):
    """Delete older distributions"""

    description = "delete older distributions, keeping N newest files"
    user_options = [
        ('match=', 'm', "patterns to match (required)"),
        ('dist-dir=', 'd', "directory where the distributions are"),
        ('keep=', 'k', "number of matching distributions to keep"),
    ]

    boolean_options = []

    def initialize_options(self):
        self.match = None
        self.dist_dir = None
        self.keep = None

    def finalize_options(self):
        if self.match is None:
            raise DistutilsOptionError(
                "Must specify one or more (comma-separated) match patterns "
                "(e.g. '.zip' or '.egg')"
            )
        if self.keep is None:
            raise DistutilsOptionError("Must specify number of files to keep")
        try:
            self.keep = int(self.keep)
        except ValueError:
            raise DistutilsOptionError("--keep must be an integer")
        if isinstance(self.match, six.string_types):
            self.match = [
                convert_path(p.strip()) for p in self.match.split(',')
            ]
        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))

    def run(self):
        self.run_command("egg_info")
        from glob import glob

        for pattern in self.match:
            pattern = self.distribution.get_name() + '*' + pattern
            files = glob(os.path.join(self.dist_dir, pattern))
            files = [(os.path.getmtime(f), f) for f in files]
            files.sort()
            files.reverse()

            log.info("%d file(s) matching %s", len(files), pattern)
            files = files[self.keep:]
            for (t, f) in files:
                log.info("Deleting %s", f)
                if not self.dry_run:
                    if os.path.isdir(f):
                        shutil.rmtree(f)
                    else:
                        os.unlink(f)
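# Hedged sketch of the keep-N-newest logic used by `rotate`, extracted
# into a standalone helper. The pattern and keep count below are
# hypothetical examples.
import os
from glob import glob

def newest_removals(pattern, keep):
    # sort matches newest-first by mtime; everything past `keep` is stale
    files = sorted(glob(pattern), key=os.path.getmtime, reverse=True)
    return files[keep:]

# e.g. newest_removals('dist/example_pkg*.egg', keep=2) lists every
# matching distribution except the two most recent ones.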
@@ -0,0 +1,22 @@
from setuptools.command.setopt import edit_config, option_base


class saveopts(option_base):
    """Save command-line options to a file"""

    description = "save supplied options to setup.cfg or other config file"

    def run(self):
        dist = self.distribution
        settings = {}

        for cmd in dist.command_options:

            if cmd == 'saveopts':
                continue  # don't save our own options!

            for opt, (src, val) in dist.get_option_dict(cmd).items():
                if src == "command line":
                    settings.setdefault(cmd, {})[opt] = val

        edit_config(self.filename, settings, self.dry_run)
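# Hedged sketch of what `saveopts` ends up passing to edit_config() when
# a user runs, say, `python setup.py build --build-base=out saveopts`.
# Only options whose source is "command line" survive, and the
# 'saveopts' command itself is skipped. Values here are hypothetical.
settings = {
    'build': {'build_base': 'out'},
}
# edit_config('setup.cfg', settings, dry_run=False) would then persist:
# [build]
# build_base = out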
206
flask/lib/python3.4/site-packages/setuptools/command/sdist.py
Normal file
@@ -0,0 +1,206 @@
from distutils import log
import distutils.command.sdist as orig
import os
import sys
import io
import contextlib

import six

from .py36compat import sdist_add_defaults

import pkg_resources

_default_revctrl = list


def walk_revctrl(dirname=''):
    """Find all files under revision control"""
    for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
        for item in ep.load()(dirname):
            yield item


class sdist(sdist_add_defaults, orig.sdist):
    """Smart sdist that finds anything supported by revision control"""

    user_options = [
        ('formats=', None,
         "formats for source distribution (comma-separated list)"),
        ('keep-temp', 'k',
         "keep the distribution tree around after creating " +
         "archive file(s)"),
        ('dist-dir=', 'd',
         "directory to put the source distribution archive(s) in "
         "[default: dist]"),
    ]

    negative_opt = {}

    READMES = 'README', 'README.rst', 'README.txt'

    def run(self):
        self.run_command('egg_info')
        ei_cmd = self.get_finalized_command('egg_info')
        self.filelist = ei_cmd.filelist
        self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
        self.check_readme()

        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        # Call check_metadata only if no 'check' command
        # (distutils <= 2.6)
        import distutils.command

        if 'check' not in distutils.command.__all__:
            self.check_metadata()

        self.make_distribution()

        dist_files = getattr(self.distribution, 'dist_files', [])
        for file in self.archive_files:
            data = ('sdist', '', file)
            if data not in dist_files:
                dist_files.append(data)

    def initialize_options(self):
        orig.sdist.initialize_options(self)

        self._default_to_gztar()

    def _default_to_gztar(self):
        # only needed on Python prior to 3.6.
        if sys.version_info >= (3, 6, 0, 'beta', 1):
            return
        self.formats = ['gztar']

    def make_distribution(self):
        """
        Workaround for #516
        """
        with self._remove_os_link():
            orig.sdist.make_distribution(self)

    @staticmethod
    @contextlib.contextmanager
    def _remove_os_link():
        """
        In a context, remove and restore os.link if it exists
        """

        class NoValue:
            pass

        orig_val = getattr(os, 'link', NoValue)
        try:
            del os.link
        except Exception:
            pass
        try:
            yield
        finally:
            if orig_val is not NoValue:
                setattr(os, 'link', orig_val)

    def __read_template_hack(self):
        # This grody hack closes the template file (MANIFEST.in) if an
        # exception occurs during read_template.
        # Doing so prevents an error when easy_install attempts to delete the
        # file.
        try:
            orig.sdist.read_template(self)
        except Exception:
            _, _, tb = sys.exc_info()
            tb.tb_next.tb_frame.f_locals['template'].close()
            raise

    # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
    # has been fixed, so only override the method if we're using an earlier
    # Python.
    has_leaky_handle = (
        sys.version_info < (2, 7, 2)
        or (3, 0) <= sys.version_info < (3, 1, 4)
        or (3, 2) <= sys.version_info < (3, 2, 1)
    )
    if has_leaky_handle:
        read_template = __read_template_hack

    def _add_defaults_python(self):
        """getting python files"""
        if self.distribution.has_pure_modules():
            build_py = self.get_finalized_command('build_py')
            self.filelist.extend(build_py.get_source_files())
            # This functionality is incompatible with include_package_data,
            # and will in fact create an infinite recursion if
            # include_package_data is True. Use of include_package_data will
            # imply that distutils-style automatic handling of package_data
            # is disabled
            if not self.distribution.include_package_data:
                for _, src_dir, _, filenames in build_py.data_files:
                    self.filelist.extend([os.path.join(src_dir, filename)
                                          for filename in filenames])

    def _add_defaults_data_files(self):
        try:
            if six.PY2:
                sdist_add_defaults._add_defaults_data_files(self)
            else:
                super()._add_defaults_data_files()
        except TypeError:
            log.warn("data_files contains unexpected objects")

    def check_readme(self):
        for f in self.READMES:
            if os.path.exists(f):
                return
        else:
            self.warn(
                "standard file not found: should have one of " +
                ', '.join(self.READMES)
            )

    def make_release_tree(self, base_dir, files):
        orig.sdist.make_release_tree(self, base_dir, files)

        # Save any egg_info command line options used to create this sdist
        dest = os.path.join(base_dir, 'setup.cfg')
        if hasattr(os, 'link') and os.path.exists(dest):
            # unlink and re-copy, since it might be hard-linked, and
            # we don't want to change the source version
            os.unlink(dest)
            self.copy_file('setup.cfg', dest)

        self.get_finalized_command('egg_info').save_version_info(dest)

    def _manifest_is_not_generated(self):
        # check for special comment used in 2.7.1 and higher
        if not os.path.isfile(self.manifest):
            return False

        with io.open(self.manifest, 'rb') as fp:
            first_line = fp.readline()
        return (first_line !=
                '# file GENERATED by distutils, do NOT edit\n'.encode())

    def read_manifest(self):
        """Read the manifest file (named by 'self.manifest') and use it to
        fill in 'self.filelist', the list of files to include in the source
        distribution.
        """
        log.info("reading manifest file '%s'", self.manifest)
        manifest = open(self.manifest, 'rb')
        for line in manifest:
            # The manifest must contain UTF-8. See #303.
            if six.PY3:
                try:
                    line = line.decode('UTF-8')
                except UnicodeDecodeError:
                    log.warn("%r not UTF-8 decodable -- skipping" % line)
                    continue
            # ignore comments and blank lines
            line = line.strip()
            if line.startswith('#') or not line:
                continue
            self.filelist.append(line)
        manifest.close()
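# Hedged standalone sketch of the sentinel test behind
# _manifest_is_not_generated() above: a distutils-generated MANIFEST
# starts with this exact comment line, so anything else is treated as a
# hand-maintained manifest.
import io

def manifest_is_hand_written(path):
    with io.open(path, 'rb') as fp:
        first_line = fp.readline()
    return first_line != b'# file GENERATED by distutils, do NOT edit\n'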
149
flask/lib/python3.4/site-packages/setuptools/command/setopt.py
Normal file
@@ -0,0 +1,149 @@
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsOptionError
import distutils
import os

from six.moves import configparser

from setuptools import Command

__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']


def config_file(kind="local"):
    """Get the filename of the distutils, local, global, or per-user config

    `kind` must be one of "local", "global", or "user"
    """
    if kind == 'local':
        return 'setup.cfg'
    if kind == 'global':
        return os.path.join(
            os.path.dirname(distutils.__file__), 'distutils.cfg'
        )
    if kind == 'user':
        dot = os.name == 'posix' and '.' or ''
        return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
    raise ValueError(
        "config_file() type must be 'local', 'global', or 'user'", kind
    )


def edit_config(filename, settings, dry_run=False):
    """Edit a configuration file to include `settings`

    `settings` is a dictionary of dictionaries or ``None`` values, keyed by
    command/section name. A ``None`` value means to delete the entire section,
    while a dictionary lists settings to be changed or deleted in that section.
    A setting of ``None`` means to delete that setting.
    """
    log.debug("Reading configuration from %s", filename)
    opts = configparser.RawConfigParser()
    opts.read([filename])
    for section, options in settings.items():
        if options is None:
            log.info("Deleting section [%s] from %s", section, filename)
            opts.remove_section(section)
        else:
            if not opts.has_section(section):
                log.debug("Adding new section [%s] to %s", section, filename)
                opts.add_section(section)
            for option, value in options.items():
                if value is None:
                    log.debug(
                        "Deleting %s.%s from %s",
                        section, option, filename
                    )
                    opts.remove_option(section, option)
                    if not opts.options(section):
                        log.info("Deleting empty [%s] section from %s",
                                 section, filename)
                        opts.remove_section(section)
                else:
                    log.debug(
                        "Setting %s.%s to %r in %s",
                        section, option, value, filename
                    )
                    opts.set(section, option, value)

    log.info("Writing %s", filename)
    if not dry_run:
        with open(filename, 'w') as f:
            opts.write(f)


class option_base(Command):
    """Abstract base class for commands that mess with config files"""

    user_options = [
        ('global-config', 'g',
         "save options to the site-wide distutils.cfg file"),
        ('user-config', 'u',
         "save options to the current user's pydistutils.cfg file"),
        ('filename=', 'f',
         "configuration file to use (default=setup.cfg)"),
    ]

    boolean_options = [
        'global-config', 'user-config',
    ]

    def initialize_options(self):
        self.global_config = None
        self.user_config = None
        self.filename = None

    def finalize_options(self):
        filenames = []
        if self.global_config:
            filenames.append(config_file('global'))
        if self.user_config:
            filenames.append(config_file('user'))
        if self.filename is not None:
            filenames.append(self.filename)
        if not filenames:
            filenames.append(config_file('local'))
        if len(filenames) > 1:
            raise DistutilsOptionError(
                "Must specify only one configuration file option",
                filenames
            )
        self.filename, = filenames


class setopt(option_base):
    """Save command-line options to a file"""

    description = "set an option in setup.cfg or another config file"

    user_options = [
        ('command=', 'c', 'command to set an option for'),
        ('option=', 'o', 'option to set'),
        ('set-value=', 's', 'value of the option'),
        ('remove', 'r', 'remove (unset) the value'),
    ] + option_base.user_options

    boolean_options = option_base.boolean_options + ['remove']

    def initialize_options(self):
        option_base.initialize_options(self)
        self.command = None
        self.option = None
        self.set_value = None
        self.remove = None

    def finalize_options(self):
        option_base.finalize_options(self)
        if self.command is None or self.option is None:
            raise DistutilsOptionError("Must specify --command *and* --option")
        if self.set_value is None and not self.remove:
            raise DistutilsOptionError("Must specify --set-value or --remove")

    def run(self):
        edit_config(
            self.filename, {
                self.command: {self.option.replace('-', '_'): self.set_value}
            },
            self.dry_run
        )
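# Hedged usage sketch: calling edit_config() directly, roughly what
# `python setup.py setopt -c bdist -o dist-dir -s out` does. The
# filename and values are hypothetical; dry_run=True keeps it harmless.
from setuptools.command.setopt import edit_config

edit_config('setup.cfg', {'bdist': {'dist_dir': 'out'}}, dry_run=True)
# Per the docstring above, a value of None would delete the option,
# and {'bdist': None} would delete the whole [bdist] section.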
254
flask/lib/python3.4/site-packages/setuptools/command/test.py
Normal file
@@ -0,0 +1,254 @@
import os
import operator
import sys
import contextlib
import itertools
from distutils.errors import DistutilsError, DistutilsOptionError
from distutils import log
from unittest import TestLoader

import six
from six.moves import map, filter

from pkg_resources import (resource_listdir, resource_exists, normalize_path,
                           working_set, _namespace_packages,
                           add_activation_listener, require, EntryPoint)
from setuptools import Command
from setuptools.py31compat import unittest_main


class ScanningLoader(TestLoader):
    def loadTestsFromModule(self, module, pattern=None):
        """Return a suite of all tests cases contained in the given module

        If the module is a package, load tests from all the modules in it.
        If the module has an ``additional_tests`` function, call it and add
        the return value to the tests.
        """
        tests = []
        tests.append(TestLoader.loadTestsFromModule(self, module))

        if hasattr(module, "additional_tests"):
            tests.append(module.additional_tests())

        if hasattr(module, '__path__'):
            for file in resource_listdir(module.__name__, ''):
                if file.endswith('.py') and file != '__init__.py':
                    submodule = module.__name__ + '.' + file[:-3]
                else:
                    if resource_exists(module.__name__, file + '/__init__.py'):
                        submodule = module.__name__ + '.' + file
                    else:
                        continue
                tests.append(self.loadTestsFromName(submodule))

        if len(tests) != 1:
            return self.suiteClass(tests)
        else:
            return tests[0]  # don't create a nested suite for only one return


# adapted from jaraco.classes.properties:NonDataProperty
class NonDataProperty(object):
    def __init__(self, fget):
        self.fget = fget

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        return self.fget(obj)


class test(Command):
    """Command to run unit tests after in-place build"""

    description = "run unit tests after in-place build"

    user_options = [
        ('test-module=', 'm', "Run 'test_suite' in specified module"),
        ('test-suite=', 's',
         "Test suite to run (e.g. 'some_module.test_suite')"),
        ('test-runner=', 'r', "Test runner to use"),
    ]

    def initialize_options(self):
        self.test_suite = None
        self.test_module = None
        self.test_loader = None
        self.test_runner = None

    def finalize_options(self):

        if self.test_suite and self.test_module:
            msg = "You may specify a module or a suite, but not both"
            raise DistutilsOptionError(msg)

        if self.test_suite is None:
            if self.test_module is None:
                self.test_suite = self.distribution.test_suite
            else:
                self.test_suite = self.test_module + ".test_suite"

        if self.test_loader is None:
            self.test_loader = getattr(self.distribution, 'test_loader', None)
        if self.test_loader is None:
            self.test_loader = "setuptools.command.test:ScanningLoader"
        if self.test_runner is None:
            self.test_runner = getattr(self.distribution, 'test_runner', None)

    @NonDataProperty
    def test_args(self):
        return list(self._test_args())

    def _test_args(self):
        if self.verbose:
            yield '--verbose'
        if self.test_suite:
            yield self.test_suite

    def with_project_on_sys_path(self, func):
        """
        Backward compatibility for project_on_sys_path context.
        """
        with self.project_on_sys_path():
            func()

    @contextlib.contextmanager
    def project_on_sys_path(self, include_dists=[]):
        with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False)

        if with_2to3:
            # If we run 2to3 we can not do this inplace:

            # Ensure metadata is up-to-date
            self.reinitialize_command('build_py', inplace=0)
            self.run_command('build_py')
            bpy_cmd = self.get_finalized_command("build_py")
            build_path = normalize_path(bpy_cmd.build_lib)

            # Build extensions
            self.reinitialize_command('egg_info', egg_base=build_path)
            self.run_command('egg_info')

            self.reinitialize_command('build_ext', inplace=0)
            self.run_command('build_ext')
        else:
            # Without 2to3 inplace works fine:
            self.run_command('egg_info')

            # Build extensions in-place
            self.reinitialize_command('build_ext', inplace=1)
            self.run_command('build_ext')

        ei_cmd = self.get_finalized_command("egg_info")

        old_path = sys.path[:]
        old_modules = sys.modules.copy()

        try:
            project_path = normalize_path(ei_cmd.egg_base)
            sys.path.insert(0, project_path)
            working_set.__init__()
            add_activation_listener(lambda dist: dist.activate())
            require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
            with self.paths_on_pythonpath([project_path]):
                yield
        finally:
            sys.path[:] = old_path
            sys.modules.clear()
            sys.modules.update(old_modules)
            working_set.__init__()

    @staticmethod
    @contextlib.contextmanager
    def paths_on_pythonpath(paths):
        """
        Add the indicated paths to the head of the PYTHONPATH environment
        variable so that subprocesses will also see the packages at
        these paths.

        Do this in a context that restores the value on exit.
        """
        nothing = object()
        orig_pythonpath = os.environ.get('PYTHONPATH', nothing)
        current_pythonpath = os.environ.get('PYTHONPATH', '')
        try:
            prefix = os.pathsep.join(paths)
            to_join = filter(None, [prefix, current_pythonpath])
            new_path = os.pathsep.join(to_join)
            if new_path:
                os.environ['PYTHONPATH'] = new_path
            yield
        finally:
            if orig_pythonpath is nothing:
                os.environ.pop('PYTHONPATH', None)
            else:
                os.environ['PYTHONPATH'] = orig_pythonpath

    @staticmethod
    def install_dists(dist):
        """
        Install the requirements indicated by self.distribution and
        return an iterable of the dists that were built.
        """
        ir_d = dist.fetch_build_eggs(dist.install_requires or [])
        tr_d = dist.fetch_build_eggs(dist.tests_require or [])
        return itertools.chain(ir_d, tr_d)

    def run(self):
        installed_dists = self.install_dists(self.distribution)

        cmd = ' '.join(self._argv)
        if self.dry_run:
            self.announce('skipping "%s" (dry run)' % cmd)
            return

        self.announce('running "%s"' % cmd)

        paths = map(operator.attrgetter('location'), installed_dists)
        with self.paths_on_pythonpath(paths):
            with self.project_on_sys_path():
                self.run_tests()

    def run_tests(self):
        # Purge modules under test from sys.modules. The test loader will
        # re-import them from the build location. Required when 2to3 is used
        # with namespace packages.
        if six.PY3 and getattr(self.distribution, 'use_2to3', False):
            module = self.test_suite.split('.')[0]
            if module in _namespace_packages:
                del_modules = []
                if module in sys.modules:
                    del_modules.append(module)
                module += '.'
                for name in sys.modules:
                    if name.startswith(module):
                        del_modules.append(name)
                list(map(sys.modules.__delitem__, del_modules))

        exit_kwarg = {} if sys.version_info < (2, 7) else {"exit": False}
        test = unittest_main(
            None, None, self._argv,
            testLoader=self._resolve_as_ep(self.test_loader),
            testRunner=self._resolve_as_ep(self.test_runner),
            **exit_kwarg
        )
        if not test.result.wasSuccessful():
            msg = 'Test failed: %s' % test.result
            self.announce(msg, log.ERROR)
            raise DistutilsError(msg)

    @property
    def _argv(self):
        return ['unittest'] + self.test_args

    @staticmethod
    def _resolve_as_ep(val):
        """
        Load the indicated attribute value, called, as if it were
        specified as an entry point.
        """
        if val is None:
            return
        parsed = EntryPoint.parse("x=" + val)
        return parsed.resolve()()
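# Hedged sketch of paths_on_pythonpath() above used on its own, so a
# child process sees an extra package directory; the path is
# hypothetical. On exit, the previous PYTHONPATH (or its absence) is
# restored.
import subprocess
import sys
from setuptools.command.test import test

with test.paths_on_pythonpath(['/tmp/extra-packages']):
    # child processes inherit the temporarily extended PYTHONPATH
    subprocess.call([sys.executable, '-c',
                     'import os; print(os.environ.get("PYTHONPATH"))'])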
@@ -0,0 +1,38 @@
import getpass
from distutils.command import upload as orig


class upload(orig.upload):
    """
    Override default upload behavior to obtain password
    in a variety of different ways.
    """

    def finalize_options(self):
        orig.upload.finalize_options(self)
        # Attempt to obtain password. Short circuit evaluation at the first
        # sign of success.
        self.password = (
            self.password or
            self._load_password_from_keyring() or
            self._prompt_for_password()
        )

    def _load_password_from_keyring(self):
        """
        Attempt to load password from keyring. Suppress Exceptions.
        """
        try:
            keyring = __import__('keyring')
            return keyring.get_password(self.repository, self.username)
        except Exception:
            pass

    def _prompt_for_password(self):
        """
        Prompt for a password on the tty. Suppress Exceptions.
        """
        try:
            return getpass.getpass()
        except (Exception, KeyboardInterrupt):
            pass
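# Hedged standalone sketch of the keyring fallback above. The repository
# URL and username are hypothetical; any failure (keyring not installed,
# no stored credential) yields None, which lets the command's
# short-circuit chain fall through to the interactive prompt.
def load_password(repository='https://upload.example.org/', username='alice'):
    try:
        keyring = __import__('keyring')
        return keyring.get_password(repository, username)
    except Exception:
        return None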
@@ -0,0 +1,206 @@
# -*- coding: utf-8 -*-
"""upload_docs

Implements a Distutils 'upload_docs' subcommand (upload documentation to
PyPI's pythonhosted.org).
"""

from base64 import standard_b64encode
from distutils import log
from distutils.errors import DistutilsOptionError
import os
import socket
import zipfile
import tempfile
import shutil
import itertools
import functools

import six
from six.moves import http_client, urllib

from pkg_resources import iter_entry_points
from .upload import upload


def _encode(s):
    errors = 'surrogateescape' if six.PY3 else 'strict'
    return s.encode('utf-8', errors)


class upload_docs(upload):
    # override the default repository as upload_docs isn't
    # supported by Warehouse (and won't be).
    DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/'

    description = 'Upload documentation to PyPI'

    user_options = [
        ('repository=', 'r',
         "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
        ('show-response', None,
         'display full response text from server'),
        ('upload-dir=', None, 'directory to upload'),
    ]
    boolean_options = upload.boolean_options

    def has_sphinx(self):
        if self.upload_dir is None:
            for ep in iter_entry_points('distutils.commands', 'build_sphinx'):
                return True

    sub_commands = [('build_sphinx', has_sphinx)]

    def initialize_options(self):
        upload.initialize_options(self)
        self.upload_dir = None
        self.target_dir = None

    def finalize_options(self):
        log.warn("Upload_docs command is deprecated. Use RTD instead.")
        upload.finalize_options(self)
        if self.upload_dir is None:
            if self.has_sphinx():
                build_sphinx = self.get_finalized_command('build_sphinx')
                self.target_dir = build_sphinx.builder_target_dir
            else:
                build = self.get_finalized_command('build')
                self.target_dir = os.path.join(build.build_base, 'docs')
        else:
            self.ensure_dirname('upload_dir')
            self.target_dir = self.upload_dir
        self.announce('Using upload directory %s' % self.target_dir)

    def create_zipfile(self, filename):
        zip_file = zipfile.ZipFile(filename, "w")
        try:
            self.mkpath(self.target_dir)  # just in case
            for root, dirs, files in os.walk(self.target_dir):
                if root == self.target_dir and not files:
                    raise DistutilsOptionError(
                        "no files found in upload directory '%s'"
                        % self.target_dir)
                for name in files:
                    full = os.path.join(root, name)
                    relative = root[len(self.target_dir):].lstrip(os.path.sep)
                    dest = os.path.join(relative, name)
                    zip_file.write(full, dest)
        finally:
            zip_file.close()

    def run(self):
        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        tmp_dir = tempfile.mkdtemp()
        name = self.distribution.metadata.get_name()
        zip_file = os.path.join(tmp_dir, "%s.zip" % name)
        try:
            self.create_zipfile(zip_file)
            self.upload_file(zip_file)
        finally:
            shutil.rmtree(tmp_dir)

    @staticmethod
    def _build_part(item, sep_boundary):
        key, values = item
        title = '\nContent-Disposition: form-data; name="%s"' % key
        # handle multiple entries for the same name
        if not isinstance(values, list):
            values = [values]
        for value in values:
            if isinstance(value, tuple):
                title += '; filename="%s"' % value[0]
                value = value[1]
            else:
                value = _encode(value)
            yield sep_boundary
            yield _encode(title)
            yield b"\n\n"
            yield value
            if value and value[-1:] == b'\r':
                yield b'\n'  # write an extra newline (lurve Macs)

    @classmethod
    def _build_multipart(cls, data):
        """
        Build up the MIME payload for the POST data
        """
        # keep the boundary as text and encode it where bytes are needed,
        # so the Content-Type header stays well-formed on Python 3
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = b'\n--' + boundary.encode('ascii')
        end_boundary = sep_boundary + b'--'
        end_items = end_boundary, b"\n",
        builder = functools.partial(
            cls._build_part,
            sep_boundary=sep_boundary,
        )
        part_groups = map(builder, data.items())
        parts = itertools.chain.from_iterable(part_groups)
        body_items = itertools.chain(parts, end_items)
        content_type = 'multipart/form-data; boundary=%s' % boundary
        return b''.join(body_items), content_type

    def upload_file(self, filename):
        with open(filename, 'rb') as f:
            content = f.read()
        meta = self.distribution.metadata
        data = {
            ':action': 'doc_upload',
            'name': meta.get_name(),
            'content': (os.path.basename(filename), content),
        }
        # set up the authentication
        credentials = _encode(self.username + ':' + self.password)
        credentials = standard_b64encode(credentials)
        if six.PY3:
            credentials = credentials.decode('ascii')
        auth = "Basic " + credentials

        body, ct = self._build_multipart(data)

        self.announce("Submitting documentation to %s" % (self.repository),
                      log.INFO)

        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = \
            urllib.parse.urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            conn = http_client.HTTPConnection(netloc)
        elif schema == 'https':
            conn = http_client.HTTPSConnection(netloc)
        else:
            raise AssertionError("unsupported schema " + schema)

        data = ''
        try:
            conn.connect()
            conn.putrequest("POST", url)
            content_type = ct
            conn.putheader('Content-type', content_type)
            conn.putheader('Content-length', str(len(body)))
            conn.putheader('Authorization', auth)
            conn.endheaders()
            conn.send(body)
        except socket.error as e:
            self.announce(str(e), log.ERROR)
            return

        r = conn.getresponse()
        if r.status == 200:
            self.announce('Server response (%s): %s' % (r.status, r.reason),
                          log.INFO)
        elif r.status == 301:
            location = r.getheader('Location')
            if location is None:
                location = 'https://pythonhosted.org/%s/' % meta.get_name()
            self.announce('Upload successful. Visit %s' % location,
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                          log.ERROR)
        if self.show_response:
            print('-' * 75, r.read(), '-' * 75)
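# Hedged sketch: feeding _build_multipart() above a minimal payload to
# see the shape of the generated MIME body. The field values are
# hypothetical.
from setuptools.command.upload_docs import upload_docs

body, content_type = upload_docs._build_multipart({
    ':action': 'doc_upload',
    'name': 'example-pkg',
    'content': ('docs.zip', b'PK...'),   # (filename, bytes) tuple
})
assert body.startswith(b'\n--')          # parts are boundary-separated
assert content_type.startswith('multipart/form-data; boundary=')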
558
flask/lib/python3.4/site-packages/setuptools/config.py
Normal file
@@ -0,0 +1,558 @@
from __future__ import absolute_import, unicode_literals
import io
import os
import sys
from collections import defaultdict
from functools import partial

from distutils.errors import DistutilsOptionError, DistutilsFileError
from setuptools.py26compat import import_module
from six import string_types


def read_configuration(
        filepath, find_others=False, ignore_option_errors=False):
    """Read given configuration file and returns options from it as a dict.

    :param str|unicode filepath: Path to configuration file
        to get options from.

    :param bool find_others: Whether to search for other configuration files
        which could be in various places.

    :param bool ignore_option_errors: Whether to silently ignore
        options, values of which could not be resolved (e.g. due to exceptions
        in directives such as file:, attr:, etc.).
        If False exceptions are propagated as expected.

    :rtype: dict
    """
    from setuptools.dist import Distribution, _Distribution

    filepath = os.path.abspath(filepath)

    if not os.path.isfile(filepath):
        raise DistutilsFileError(
            'Configuration file %s does not exist.' % filepath)

    current_directory = os.getcwd()
    os.chdir(os.path.dirname(filepath))

    try:
        dist = Distribution()

        filenames = dist.find_config_files() if find_others else []
        if filepath not in filenames:
            filenames.append(filepath)

        _Distribution.parse_config_files(dist, filenames=filenames)

        handlers = parse_configuration(
            dist, dist.command_options,
            ignore_option_errors=ignore_option_errors)

    finally:
        os.chdir(current_directory)

    return configuration_to_dict(handlers)


def configuration_to_dict(handlers):
    """Returns configuration data gathered by given handlers as a dict.

    :param list[ConfigHandler] handlers: Handlers list,
        usually from parse_configuration()

    :rtype: dict
    """
    config_dict = defaultdict(dict)

    for handler in handlers:

        obj_alias = handler.section_prefix
        target_obj = handler.target_obj

        for option in handler.set_options:
            getter = getattr(target_obj, 'get_%s' % option, None)

            if getter is None:
                value = getattr(target_obj, option)

            else:
                value = getter()

            config_dict[obj_alias][option] = value

    return config_dict


def parse_configuration(
        distribution, command_options, ignore_option_errors=False):
    """Performs additional parsing of configuration options
    for a distribution.

    Returns a list of used option handlers.

    :param Distribution distribution:
    :param dict command_options:
    :param bool ignore_option_errors: Whether to silently ignore
        options, values of which could not be resolved (e.g. due to exceptions
        in directives such as file:, attr:, etc.).
        If False exceptions are propagated as expected.
    :rtype: list
    """
    meta = ConfigMetadataHandler(
        distribution.metadata, command_options, ignore_option_errors)
    meta.parse()

    options = ConfigOptionsHandler(
        distribution, command_options, ignore_option_errors)
    options.parse()

    return [meta, options]


class ConfigHandler(object):
    """Handles metadata supplied in configuration files."""

    section_prefix = None
    """Prefix for config sections handled by this handler.
    Must be provided by class heirs.

    """

    aliases = {}
    """Options aliases.
    For compatibility with various packages. E.g.: d2to1 and pbr.
    Note: `-` in keys is replaced with `_` by config parser.

    """

    def __init__(self, target_obj, options, ignore_option_errors=False):
        sections = {}

        section_prefix = self.section_prefix
        for section_name, section_options in options.items():
            if not section_name.startswith(section_prefix):
                continue

            section_name = section_name.replace(section_prefix, '').strip('.')
            sections[section_name] = section_options

        self.ignore_option_errors = ignore_option_errors
        self.target_obj = target_obj
        self.sections = sections
        self.set_options = []

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        raise NotImplementedError(
            '%s must provide .parsers property' % self.__class__.__name__)

    def __setitem__(self, option_name, value):
        unknown = tuple()
        target_obj = self.target_obj

        # Translate alias into real name.
        option_name = self.aliases.get(option_name, option_name)

        current_value = getattr(target_obj, option_name, unknown)

        if current_value is unknown:
            raise KeyError(option_name)

        if current_value:
            # Already inhabited. Skipping.
            return

        skip_option = False
        parser = self.parsers.get(option_name)
        if parser:
            try:
                value = parser(value)

            except Exception:
                skip_option = True
                if not self.ignore_option_errors:
                    raise

        if skip_option:
            return

        setter = getattr(target_obj, 'set_%s' % option_name, None)
        if setter is None:
            setattr(target_obj, option_name, value)
        else:
            setter(value)

        self.set_options.append(option_name)

    @classmethod
    def _parse_list(cls, value, separator=','):
        """Represents value as a list.

        Value is split either by separator (defaults to comma) or by lines.

        :param value:
        :param separator: List items separator character.
        :rtype: list
        """
        if isinstance(value, list):  # _get_parser_compound case
            return value

        if '\n' in value:
            value = value.splitlines()
        else:
            value = value.split(separator)

        return [chunk.strip() for chunk in value if chunk.strip()]

    @classmethod
    def _parse_dict(cls, value):
        """Represents value as a dict.

        :param value:
        :rtype: dict
        """
        separator = '='
        result = {}
        for line in cls._parse_list(value):
            key, sep, val = line.partition(separator)
            if sep != separator:
                raise DistutilsOptionError(
                    'Unable to parse option value to dict: %s' % value)
            result[key.strip()] = val.strip()

        return result

    @classmethod
    def _parse_bool(cls, value):
        """Represents value as boolean.

        :param value:
        :rtype: bool
        """
        value = value.lower()
        return value in ('1', 'true', 'yes')

    @classmethod
    def _parse_file(cls, value):
        """Represents value as a string, allowing including text
        from nearest files using the `file:` directive.

        Directive is sandboxed and won't reach anything outside
        directory with setup.py.

        Examples:
            file: LICENSE
            file: src/file.txt

        :param str value:
        :rtype: str
        """
        if not isinstance(value, string_types):
            return value

        include_directive = 'file:'
        if not value.startswith(include_directive):
            return value

        current_directory = os.getcwd()

        filepath = value.replace(include_directive, '').strip()
        filepath = os.path.abspath(filepath)

        if not filepath.startswith(current_directory):
            raise DistutilsOptionError(
                '`file:` directive can not access %s' % filepath)

        if os.path.isfile(filepath):
            with io.open(filepath, encoding='utf-8') as f:
                value = f.read()

        return value

    @classmethod
    def _parse_attr(cls, value):
        """Represents value as a module attribute.

        Examples:
            attr: package.attr
            attr: package.module.attr

        :param str value:
        :rtype: str
        """
        attr_directive = 'attr:'
        if not value.startswith(attr_directive):
            return value

        attrs_path = value.replace(attr_directive, '').strip().split('.')
        attr_name = attrs_path.pop()

        module_name = '.'.join(attrs_path)
        module_name = module_name or '__init__'

        sys.path.insert(0, os.getcwd())
        try:
            module = import_module(module_name)
            value = getattr(module, attr_name)

        finally:
            sys.path = sys.path[1:]

        return value

    @classmethod
    def _get_parser_compound(cls, *parse_methods):
        """Returns a parser function that parses a value by applying
        the given methods one after another.

        :param parse_methods:
        :rtype: callable
        """
        def parse(value):
            parsed = value

            for method in parse_methods:
                parsed = method(parsed)

            return parsed

        return parse

    @classmethod
    def _parse_section_to_dict(cls, section_options, values_parser=None):
        """Parses section options into a dictionary.

        Optionally applies a given parser to values.

        :param dict section_options:
        :param callable values_parser:
        :rtype: dict
        """
        value = {}
        values_parser = values_parser or (lambda val: val)
        for key, (_, val) in section_options.items():
            value[key] = values_parser(val)
        return value

    def parse_section(self, section_options):
        """Parses configuration file section.

        :param dict section_options:
        """
        for (name, (_, value)) in section_options.items():
            try:
                self[name] = value

            except KeyError:
                pass  # Keep silent, as a new option may appear anytime.

    def parse(self):
        """Parses configuration file items from one
        or more related sections.

        """
        for section_name, section_options in self.sections.items():

            method_postfix = ''
            if section_name:  # [section.option] variant
                method_postfix = '_%s' % section_name

            section_parser_method = getattr(
                self,
                # Dots in section names are translated into dunderscores.
                ('parse_section%s' % method_postfix).replace('.', '__'),
                None)

            if section_parser_method is None:
                raise DistutilsOptionError(
                    'Unsupported distribution option section: [%s.%s]' % (
                        self.section_prefix, section_name))

            section_parser_method(section_options)


class ConfigMetadataHandler(ConfigHandler):

    section_prefix = 'metadata'

    aliases = {
        'home_page': 'url',
        'summary': 'description',
        'classifier': 'classifiers',
        'platform': 'platforms',
    }

    strict_mode = False
    """We need to keep it loose, to be partially compatible with
    `pbr` and `d2to1` packages which also use the `metadata` section.

    """

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        parse_list = self._parse_list
        parse_file = self._parse_file

        return {
            'platforms': parse_list,
            'keywords': parse_list,
            'provides': parse_list,
            'requires': parse_list,
            'obsoletes': parse_list,
            'classifiers': self._get_parser_compound(parse_file, parse_list),
            'license': parse_file,
            'description': parse_file,
            'long_description': parse_file,
            'version': self._parse_version,
        }

    def parse_section_classifiers(self, section_options):
        """Parses `classifiers` configuration file section.

        :param dict section_options:
        """
        classifiers = []
        for begin, (_, rest) in section_options.items():
            classifiers.append('%s :%s' % (begin.title(), rest))

        self['classifiers'] = classifiers

    def _parse_version(self, value):
        """Parses `version` option value.

        :param value:
        :rtype: str

        """
        version = self._parse_attr(value)

        if callable(version):
            version = version()

        if not isinstance(version, string_types):
            if hasattr(version, '__iter__'):
                version = '.'.join(map(str, version))
            else:
                version = '%s' % version

        return version


class ConfigOptionsHandler(ConfigHandler):

    section_prefix = 'options'

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        parse_list = self._parse_list
        parse_list_semicolon = partial(self._parse_list, separator=';')
        parse_bool = self._parse_bool
        parse_dict = self._parse_dict

        return {
            'zip_safe': parse_bool,
            'use_2to3': parse_bool,
            'include_package_data': parse_bool,
            'package_dir': parse_dict,
            'use_2to3_fixers': parse_list,
            'use_2to3_exclude_fixers': parse_list,
            'convert_2to3_doctests': parse_list,
            'scripts': parse_list,
            'eager_resources': parse_list,
            'dependency_links': parse_list,
            'namespace_packages': parse_list,
            'install_requires': parse_list_semicolon,
            'setup_requires': parse_list_semicolon,
            'tests_require': parse_list_semicolon,
            'packages': self._parse_packages,
            'entry_points': self._parse_file,
        }

    def _parse_packages(self, value):
        """Parses `packages` option value.

        :param value:
        :rtype: list
        """
        find_directive = 'find:'

        if not value.startswith(find_directive):
            return self._parse_list(value)

        # Read function arguments from a dedicated section.
        find_kwargs = self.parse_section_packages__find(
            self.sections.get('packages.find', {}))

        from setuptools import find_packages

        return find_packages(**find_kwargs)

    def parse_section_packages__find(self, section_options):
        """Parses `packages.find` configuration file section.

        To be used in conjunction with _parse_packages().

        :param dict section_options:
        """
        section_data = self._parse_section_to_dict(
            section_options, self._parse_list)

        valid_keys = ['where', 'include', 'exclude']

        find_kwargs = dict(
            [(k, v) for k, v in section_data.items() if k in valid_keys and v])

        where = find_kwargs.get('where')
        if where is not None:
            find_kwargs['where'] = where[0]  # cast list to single val

        return find_kwargs

    def parse_section_entry_points(self, section_options):
        """Parses `entry_points` configuration file section.

        :param dict section_options:
        """
        parsed = self._parse_section_to_dict(section_options, self._parse_list)
        self['entry_points'] = parsed

    def _parse_package_data(self, section_options):
        parsed = self._parse_section_to_dict(section_options, self._parse_list)

        root = parsed.get('*')
        if root:
            parsed[''] = root
            del parsed['*']

        return parsed

    def parse_section_package_data(self, section_options):
        """Parses `package_data` configuration file section.

        :param dict section_options:
        """
        self['package_data'] = self._parse_package_data(section_options)

    def parse_section_exclude_package_data(self, section_options):
        """Parses `exclude_package_data` configuration file section.

        :param dict section_options:
        """
        self['exclude_package_data'] = self._parse_package_data(
            section_options)

    def parse_section_extras_require(self, section_options):
        """Parses `extras_require` configuration file section.

        :param dict section_options:
        """
        parse_list = partial(self._parse_list, separator=';')
        self['extras_require'] = self._parse_section_to_dict(
            section_options, parse_list)
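# Hedged end-to-end sketch exercising read_configuration() above on a
# throwaway setup.cfg. All names in the config are hypothetical.
import os
import tempfile
from setuptools.config import read_configuration

cfg = "[metadata]\nname = example-pkg\nversion = 1.0\n"
workdir = tempfile.mkdtemp()
path = os.path.join(workdir, 'setup.cfg')
with open(path, 'w') as f:
    f.write(cfg)

conf = read_configuration(path)
assert conf['metadata']['name'] == 'example-pkg'
assert conf['metadata']['version'] == '1.0'
# version could also use `attr: example_pkg.__version__` or a `file:`
# directive, resolved by _parse_attr / _parse_file above.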
23
flask/lib/python3.4/site-packages/setuptools/dep_util.py
Normal file
@@ -0,0 +1,23 @@
from distutils.dep_util import newer_group


# yes, this was almost entirely copy-pasted from
# 'newer_pairwise()', this is just another convenience
# function.
def newer_pairwise_group(sources_groups, targets):
    """Walk both arguments in parallel, testing if each source group is newer
    than its corresponding target. Returns a pair of lists (sources_groups,
    targets) where sources is newer than target, according to the semantics
    of 'newer_group()'.
    """
    if len(sources_groups) != len(targets):
        raise ValueError(
            "'sources_groups' and 'targets' must be the same length")

    # build a pair of lists (sources_groups, targets) where source is newer
    n_sources = []
    n_targets = []
    for i in range(len(sources_groups)):
        if newer_group(sources_groups[i], targets[i]):
            n_sources.append(sources_groups[i])
            n_targets.append(targets[i])

    return n_sources, n_targets
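# Hedged sketch of what newer_pairwise_group() returns for two source
# groups and their targets. File names are hypothetical; a missing
# target counts as out of date per distutils' newer_group() semantics.
sources = [['a.c', 'a.h'], ['b.c']]
targets = ['a.o', 'b.o']
# If only b.o is stale (or absent), the call returns ([['b.c']], ['b.o']),
# i.e. just the (group, target) pairs that need rebuilding:
# n_sources, n_targets = newer_pairwise_group(sources, targets)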
186
flask/lib/python3.4/site-packages/setuptools/depends.py
Normal file
@@ -0,0 +1,186 @@
import sys
import imp
import marshal
from distutils.version import StrictVersion
from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN

from .py33compat import Bytecode


__all__ = [
    'Require', 'find_module', 'get_module_constant', 'extract_constant'
]


class Require:
    """A prerequisite to building or installing a distribution"""

    def __init__(self, name, requested_version, module, homepage='',
                 attribute=None, format=None):

        if format is None and requested_version is not None:
            format = StrictVersion

        if format is not None:
            requested_version = format(requested_version)
            if attribute is None:
                attribute = '__version__'

        self.__dict__.update(locals())
        del self.self

    def full_name(self):
        """Return full package/distribution name, w/version"""
        if self.requested_version is not None:
            return '%s-%s' % (self.name, self.requested_version)
        return self.name

    def version_ok(self, version):
        """Is 'version' sufficiently up-to-date?"""
        return self.attribute is None or self.format is None or \
            str(version) != "unknown" and version >= self.requested_version

    def get_version(self, paths=None, default="unknown"):
        """Get version number of installed module, 'None', or 'default'

        Search 'paths' for module.  If not found, return 'None'.  If found,
        return the extracted version attribute, or 'default' if no version
        attribute was specified, or the value cannot be determined without
        importing the module.  The version is formatted according to the
        requirement's version format (if any), unless it is 'None' or the
        supplied 'default'.
        """

        if self.attribute is None:
            try:
                f, p, i = find_module(self.module, paths)
                if f:
                    f.close()
                return default
            except ImportError:
                return None

        v = get_module_constant(self.module, self.attribute, default, paths)

        if v is not None and v is not default and self.format is not None:
            return self.format(v)

        return v

    def is_present(self, paths=None):
        """Return true if dependency is present on 'paths'"""
        return self.get_version(paths) is not None

    def is_current(self, paths=None):
        """Return true if dependency is present and up-to-date on 'paths'"""
        version = self.get_version(paths)
        if version is None:
            return False
        return self.version_ok(version)


def find_module(module, paths=None):
    """Just like 'imp.find_module()', but with package support"""

    parts = module.split('.')

    while parts:
        part = parts.pop(0)
        f, path, (suffix, mode, kind) = info = imp.find_module(part, paths)

        if kind == PKG_DIRECTORY:
            parts = parts or ['__init__']
            paths = [path]

        elif parts:
            raise ImportError("Can't find %r in %s" % (parts, module))

    return info


def get_module_constant(module, symbol, default=-1, paths=None):
    """Find 'module' by searching 'paths', and extract 'symbol'

    Return 'None' if 'module' does not exist on 'paths', or it does not define
    'symbol'.  If the module defines 'symbol' as a constant, return the
    constant.  Otherwise, return 'default'."""

    try:
        f, path, (suffix, mode, kind) = find_module(module, paths)
    except ImportError:
        # Module doesn't exist
        return None

    try:
        if kind == PY_COMPILED:
            f.read(8)  # skip magic & date
            code = marshal.load(f)
        elif kind == PY_FROZEN:
            code = imp.get_frozen_object(module)
        elif kind == PY_SOURCE:
            code = compile(f.read(), path, 'exec')
        else:
            # Not something we can parse; we'll have to import it.  :(
            if module not in sys.modules:
                imp.load_module(module, f, path, (suffix, mode, kind))
            return getattr(sys.modules[module], symbol, None)

    finally:
        if f:
            f.close()

    return extract_constant(code, symbol, default)


def extract_constant(code, symbol, default=-1):
    """Extract the constant value of 'symbol' from 'code'

    If the name 'symbol' is bound to a constant value by the Python code
    object 'code', return that value.  If 'symbol' is bound to an expression,
    return 'default'.  Otherwise, return 'None'.

    Return value is based on the first assignment to 'symbol'.  'symbol' must
    be a global, or at least a non-"fast" local in the code block.  That is,
    only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
    must be present in 'code.co_names'.
    """
    if symbol not in code.co_names:
        # name's not there, can't possibly be an assignment
        return None

    name_idx = list(code.co_names).index(symbol)

    STORE_NAME = 90
    STORE_GLOBAL = 97
    LOAD_CONST = 100

    const = default

    for byte_code in Bytecode(code):
        op = byte_code.opcode
        arg = byte_code.arg

        if op == LOAD_CONST:
            const = code.co_consts[arg]
        elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
            return const
        else:
            const = default


def _update_globals():
    """
    Patch the globals to remove the objects not available on some platforms.

    XXX it'd be better to test assertions about bytecode instead.
    """

    if not sys.platform.startswith('java') and sys.platform != 'cli':
        return
    incompatible = 'extract_constant', 'get_module_constant'
    for name in incompatible:
        del globals()[name]
        __all__.remove(name)


_update_globals()
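# Hedged usage sketch for the Require class above: declaring and
# checking a dependency. The module name is real; the version floor is
# a hypothetical example.
from setuptools.depends import Require

req = Require('six', '1.10.0', 'six')   # checks six.__version__ by default
if req.is_present():
    print(req.full_name(), 'found, up-to-date:', req.is_current())
else:
    print(req.full_name(), 'is missing')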
925
flask/lib/python3.4/site-packages/setuptools/dist.py
Normal file
@@ -0,0 +1,925 @@
|
||||
__all__ = ['Distribution']

import re
import os
import warnings
import numbers
import distutils.log
import distutils.core
import distutils.cmd
import distutils.dist
from distutils.errors import (DistutilsOptionError, DistutilsPlatformError,
                              DistutilsSetupError)
from distutils.util import rfc822_escape

import six
from six.moves import map
import packaging

from setuptools.depends import Require
from setuptools import windows_support
from setuptools.monkey import get_unpatched
from setuptools.config import parse_configuration
import pkg_resources
from .py36compat import Distribution_parse_config_files


def _get_unpatched(cls):
    warnings.warn("Do not call this function", DeprecationWarning)
    return get_unpatched(cls)


# Based on Python 3.5 version
def write_pkg_file(self, file):
    """Write the PKG-INFO format data to a file object.
    """
    version = '1.0'
    if (self.provides or self.requires or self.obsoletes or
            self.classifiers or self.download_url):
        version = '1.1'
    # Setuptools specific for PEP 345
    if hasattr(self, 'python_requires'):
        version = '1.2'

    file.write('Metadata-Version: %s\n' % version)
    file.write('Name: %s\n' % self.get_name())
    file.write('Version: %s\n' % self.get_version())
    file.write('Summary: %s\n' % self.get_description())
    file.write('Home-page: %s\n' % self.get_url())
    file.write('Author: %s\n' % self.get_contact())
    file.write('Author-email: %s\n' % self.get_contact_email())
    file.write('License: %s\n' % self.get_license())
    if self.download_url:
        file.write('Download-URL: %s\n' % self.download_url)

    long_desc = rfc822_escape(self.get_long_description())
    file.write('Description: %s\n' % long_desc)

    keywords = ','.join(self.get_keywords())
    if keywords:
        file.write('Keywords: %s\n' % keywords)

    self._write_list(file, 'Platform', self.get_platforms())
    self._write_list(file, 'Classifier', self.get_classifiers())

    # PEP 314
    self._write_list(file, 'Requires', self.get_requires())
    self._write_list(file, 'Provides', self.get_provides())
    self._write_list(file, 'Obsoletes', self.get_obsoletes())

    # Setuptools specific for PEP 345
    if hasattr(self, 'python_requires'):
        file.write('Requires-Python: %s\n' % self.python_requires)


# from Python 3.4
def write_pkg_info(self, base_dir):
    """Write the PKG-INFO file into the release tree.
    """
    with open(os.path.join(base_dir, 'PKG-INFO'), 'w',
              encoding='UTF-8') as pkg_info:
        self.write_pkg_file(pkg_info)


sequence = tuple, list


def check_importable(dist, attr, value):
    try:
        ep = pkg_resources.EntryPoint.parse('x=' + value)
        assert not ep.extras
    except (TypeError, ValueError, AttributeError, AssertionError):
        raise DistutilsSetupError(
            "%r must be importable 'module:attrs' string (got %r)"
            % (attr, value)
        )


def assert_string_list(dist, attr, value):
    """Verify that value is a string list or None"""
    try:
        assert ''.join(value) != value
    except (TypeError, ValueError, AttributeError, AssertionError):
        raise DistutilsSetupError(
            "%r must be a list of strings (got %r)" % (attr, value)
        )


def check_nsp(dist, attr, value):
    """Verify that namespace packages are valid"""
    ns_packages = value
    assert_string_list(dist, attr, ns_packages)
    for nsp in ns_packages:
        if not dist.has_contents_for(nsp):
            raise DistutilsSetupError(
                "Distribution contains no modules or packages for " +
                "namespace package %r" % nsp
            )
        parent, sep, child = nsp.rpartition('.')
        if parent and parent not in ns_packages:
            distutils.log.warn(
                "WARNING: %r is declared as a package namespace, but %r"
                " is not: please correct this in setup.py", nsp, parent
            )


def check_extras(dist, attr, value):
    """Verify that extras_require mapping is valid"""
    try:
        for k, v in value.items():
            if ':' in k:
                k, m = k.split(':', 1)
                if pkg_resources.invalid_marker(m):
                    raise DistutilsSetupError("Invalid environment marker: " + m)
            list(pkg_resources.parse_requirements(v))
    except (TypeError, ValueError, AttributeError):
        raise DistutilsSetupError(
            "'extras_require' must be a dictionary whose values are "
            "strings or lists of strings containing valid project/version "
            "requirement specifiers."
        )


def assert_bool(dist, attr, value):
    """Verify that value is True, False, 0, or 1"""
    if bool(value) != value:
        tmpl = "{attr!r} must be a boolean value (got {value!r})"
        raise DistutilsSetupError(tmpl.format(attr=attr, value=value))


def check_requirements(dist, attr, value):
    """Verify that install_requires is a valid requirements list"""
    try:
        list(pkg_resources.parse_requirements(value))
    except (TypeError, ValueError) as error:
        tmpl = (
            "{attr!r} must be a string or list of strings "
            "containing valid project/version requirement specifiers; {error}"
        )
        raise DistutilsSetupError(tmpl.format(attr=attr, error=error))


def check_specifier(dist, attr, value):
    """Verify that value is a valid version specifier"""
    try:
        packaging.specifiers.SpecifierSet(value)
    except packaging.specifiers.InvalidSpecifier as error:
        tmpl = (
            "{attr!r} must be a string or list of strings "
            "containing valid version specifiers; {error}"
        )
        raise DistutilsSetupError(tmpl.format(attr=attr, error=error))


def check_entry_points(dist, attr, value):
    """Verify that entry_points map is parseable"""
    try:
        pkg_resources.EntryPoint.parse_map(value)
    except ValueError as e:
        raise DistutilsSetupError(e)


def check_test_suite(dist, attr, value):
    if not isinstance(value, six.string_types):
        raise DistutilsSetupError("test_suite must be a string")


def check_package_data(dist, attr, value):
    """Verify that value is a dictionary of package names to glob lists"""
    if isinstance(value, dict):
        for k, v in value.items():
            if not isinstance(k, str):
                break
            try:
                iter(v)
            except TypeError:
                break
        else:
            return
    raise DistutilsSetupError(
        attr + " must be a dictionary mapping package names to lists of "
        "wildcard patterns"
    )


def check_packages(dist, attr, value):
    for pkgname in value:
        if not re.match(r'\w+(\.\w+)*', pkgname):
            distutils.log.warn(
                "WARNING: %r not a valid package name; please use only "
                ".-separated package names in setup.py", pkgname
            )


_Distribution = get_unpatched(distutils.core.Distribution)

class Distribution(Distribution_parse_config_files, _Distribution):
    """Distribution with support for features, tests, and package data

    This is an enhanced version of 'distutils.dist.Distribution' that
    effectively adds the following new optional keyword arguments to 'setup()':

     'install_requires' -- a string or sequence of strings specifying project
        versions that the distribution requires when installed, in the format
        used by 'pkg_resources.require()'.  They will be installed
        automatically when the package is installed.  If you wish to use
        packages that are not available in PyPI, or want to give your users an
        alternate download location, you can add a 'find_links' option to the
        '[easy_install]' section of your project's 'setup.cfg' file, and then
        setuptools will scan the listed web pages for links that satisfy the
        requirements.

     'extras_require' -- a dictionary mapping names of optional "extras" to the
        additional requirement(s) that using those extras incurs. For example,
        this::

            extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])

        indicates that the distribution can optionally provide an extra
        capability called "reST", but it can only be used if docutils and
        reSTedit are installed.  If the user installs your package using
        EasyInstall and requests one of your extras, the corresponding
        additional requirements will be installed if needed.

     'features' **deprecated** -- a dictionary mapping option names to
        'setuptools.Feature'
        objects.  Features are a portion of the distribution that can be
        included or excluded based on user options, inter-feature dependencies,
        and availability on the current system.  Excluded features are omitted
        from all setup commands, including source and binary distributions, so
        you can create multiple distributions from the same source tree.
        Feature names should be valid Python identifiers, except that they may
        contain the '-' (minus) sign.  Features can be included or excluded
        via the command line options '--with-X' and '--without-X', where 'X' is
        the name of the feature.  Whether a feature is included by default, and
        whether you are allowed to control this from the command line, is
        determined by the Feature object.  See the 'Feature' class for more
        information.

     'test_suite' -- the name of a test suite to run for the 'test' command.
        If the user runs 'python setup.py test', the package will be installed,
        and the named test suite will be run.  The format is the same as
        would be used on a 'unittest.py' command line.  That is, it is the
        dotted name of an object to import and call to generate a test suite.

     'package_data' -- a dictionary mapping package names to lists of filenames
        or globs to use to find data files contained in the named packages.
        If the dictionary has filenames or globs listed under '""' (the empty
        string), those names will be searched for in every package, in addition
        to any names for the specific package.  Data files found using these
        names/globs will be installed along with the package, in the same
        location as the package.  Note that globs are allowed to reference
        the contents of non-package subdirectories, as long as you use '/' as
        a path separator.  (Globs are automatically converted to
        platform-specific paths at runtime.)

    In addition to these new keywords, this class also has several new methods
    for manipulating the distribution's contents.  For example, the 'include()'
    and 'exclude()' methods can be thought of as in-place add and subtract
    commands that add or remove packages, modules, extensions, and so on from
    the distribution.  They are used by the feature subsystem to configure the
    distribution for the included and excluded features.
    """

    _patched_dist = None

    def patch_missing_pkg_info(self, attrs):
        # Fake up a replacement for the data that would normally come from
        # PKG-INFO, but which might not yet be built if this is a fresh
        # checkout.
        #
        if not attrs or 'name' not in attrs or 'version' not in attrs:
            return
        key = pkg_resources.safe_name(str(attrs['name'])).lower()
        dist = pkg_resources.working_set.by_key.get(key)
        if dist is not None and not dist.has_metadata('PKG-INFO'):
            dist._version = pkg_resources.safe_version(str(attrs['version']))
            self._patched_dist = dist

    def __init__(self, attrs=None):
        have_package_data = hasattr(self, "package_data")
        if not have_package_data:
            self.package_data = {}
        _attrs_dict = attrs or {}
        if 'features' in _attrs_dict or 'require_features' in _attrs_dict:
            Feature.warn_deprecated()
        self.require_features = []
        self.features = {}
        self.dist_files = []
        self.src_root = attrs and attrs.pop("src_root", None)
        self.patch_missing_pkg_info(attrs)
        # Make sure we have any eggs needed to interpret 'attrs'
        if attrs is not None:
            self.dependency_links = attrs.pop('dependency_links', [])
            assert_string_list(self, 'dependency_links', self.dependency_links)
        if attrs and 'setup_requires' in attrs:
            self.fetch_build_eggs(attrs['setup_requires'])
        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
            vars(self).setdefault(ep.name, None)
        _Distribution.__init__(self, attrs)
        if isinstance(self.metadata.version, numbers.Number):
            # Some people apparently take "version number" too literally :)
            self.metadata.version = str(self.metadata.version)

        if self.metadata.version is not None:
            try:
                ver = packaging.version.Version(self.metadata.version)
                normalized_version = str(ver)
                if self.metadata.version != normalized_version:
                    warnings.warn(
                        "Normalizing '%s' to '%s'" % (
                            self.metadata.version,
                            normalized_version,
                        )
                    )
                    self.metadata.version = normalized_version
            except (packaging.version.InvalidVersion, TypeError):
                warnings.warn(
                    "The version specified (%r) is an invalid version, this "
                    "may not work as expected with newer versions of "
                    "setuptools, pip, and PyPI. Please see PEP 440 for more "
                    "details." % self.metadata.version
                )
        if getattr(self, 'python_requires', None):
            self.metadata.python_requires = self.python_requires

    def parse_config_files(self, filenames=None):
        """Parses configuration files from various levels
        and loads configuration.

        """
        _Distribution.parse_config_files(self, filenames=filenames)

        parse_configuration(self, self.command_options)

    def parse_command_line(self):
        """Process features after parsing command line options"""
        result = _Distribution.parse_command_line(self)
        if self.features:
            self._finalize_features()
        return result

    def _feature_attrname(self, name):
        """Convert feature name to corresponding option attribute name"""
        return 'with_' + name.replace('-', '_')

    def fetch_build_eggs(self, requires):
        """Resolve pre-setup requirements"""
        resolved_dists = pkg_resources.working_set.resolve(
            pkg_resources.parse_requirements(requires),
            installer=self.fetch_build_egg,
            replace_conflicting=True,
        )
        for dist in resolved_dists:
            pkg_resources.working_set.add(dist, replace=True)
        return resolved_dists

    def finalize_options(self):
        _Distribution.finalize_options(self)
        if self.features:
            self._set_global_opts_from_features()

        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
            value = getattr(self, ep.name, None)
            if value is not None:
                ep.require(installer=self.fetch_build_egg)
                ep.load()(self, ep.name, value)
        if getattr(self, 'convert_2to3_doctests', None):
            # XXX may convert to set here when we can rely on set being builtin
            self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests]
        else:
            self.convert_2to3_doctests = []

    def get_egg_cache_dir(self):
        egg_cache_dir = os.path.join(os.curdir, '.eggs')
        if not os.path.exists(egg_cache_dir):
            os.mkdir(egg_cache_dir)
            windows_support.hide_file(egg_cache_dir)
            readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt')
            with open(readme_txt_filename, 'w') as f:
                f.write('This directory contains eggs that were downloaded '
                        'by setuptools to build, test, and run plug-ins.\n\n')
                f.write('This directory caches those eggs to prevent '
                        'repeated downloads.\n\n')
                f.write('However, it is safe to delete this directory.\n\n')

        return egg_cache_dir

    def fetch_build_egg(self, req):
        """Fetch an egg needed for building"""

        try:
            cmd = self._egg_fetcher
            cmd.package_index.to_scan = []
        except AttributeError:
            from setuptools.command.easy_install import easy_install
            dist = self.__class__({'script_args': ['easy_install']})
            dist.parse_config_files()
            opts = dist.get_option_dict('easy_install')
            keep = (
                'find_links', 'site_dirs', 'index_url', 'optimize',
                'site_dirs', 'allow_hosts'
            )
            for key in list(opts):
                if key not in keep:
                    del opts[key]  # don't use any other settings
            if self.dependency_links:
                links = self.dependency_links[:]
                if 'find_links' in opts:
                    links = opts['find_links'][1].split() + links
                opts['find_links'] = ('setup', links)
            install_dir = self.get_egg_cache_dir()
            cmd = easy_install(
                dist, args=["x"], install_dir=install_dir, exclude_scripts=True,
                always_copy=False, build_directory=None, editable=False,
                upgrade=False, multi_version=True, no_report=True, user=False
            )
            cmd.ensure_finalized()
            self._egg_fetcher = cmd
        return cmd.easy_install(req)
    def _set_global_opts_from_features(self):
        """Add --with-X/--without-X options based on optional features"""

        go = []
        no = self.negative_opt.copy()

        for name, feature in self.features.items():
            self._set_feature(name, None)
            feature.validate(self)

            if feature.optional:
                descr = feature.description
                incdef = ' (default)'
                excdef = ''
                if not feature.include_by_default():
                    excdef, incdef = incdef, excdef

                go.append(('with-' + name, None, 'include ' + descr + incdef))
                go.append(('without-' + name, None, 'exclude ' + descr + excdef))
                no['without-' + name] = 'with-' + name

        self.global_options = self.feature_options = go + self.global_options
        self.negative_opt = self.feature_negopt = no

    def _finalize_features(self):
        """Add/remove features and resolve dependencies between them"""

        # First, flag all the enabled items (and thus their dependencies)
        for name, feature in self.features.items():
            enabled = self.feature_is_included(name)
            if enabled or (enabled is None and feature.include_by_default()):
                feature.include_in(self)
                self._set_feature(name, 1)

        # Then disable the rest, so that off-by-default features don't
        # get flagged as errors when they're required by an enabled feature
        for name, feature in self.features.items():
            if not self.feature_is_included(name):
                feature.exclude_from(self)
                self._set_feature(name, 0)

    def get_command_class(self, command):
        """Pluggable version of get_command_class()"""
        if command in self.cmdclass:
            return self.cmdclass[command]

        for ep in pkg_resources.iter_entry_points('distutils.commands', command):
            ep.require(installer=self.fetch_build_egg)
            self.cmdclass[command] = cmdclass = ep.load()
            return cmdclass
        else:
            return _Distribution.get_command_class(self, command)

    def print_commands(self):
        for ep in pkg_resources.iter_entry_points('distutils.commands'):
            if ep.name not in self.cmdclass:
                # don't require extras as the commands won't be invoked
                cmdclass = ep.resolve()
                self.cmdclass[ep.name] = cmdclass
        return _Distribution.print_commands(self)

    def get_command_list(self):
        for ep in pkg_resources.iter_entry_points('distutils.commands'):
            if ep.name not in self.cmdclass:
                # don't require extras as the commands won't be invoked
                cmdclass = ep.resolve()
                self.cmdclass[ep.name] = cmdclass
        return _Distribution.get_command_list(self)

    def _set_feature(self, name, status):
        """Set feature's inclusion status"""
        setattr(self, self._feature_attrname(name), status)

    def feature_is_included(self, name):
        """Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
        return getattr(self, self._feature_attrname(name))

    def include_feature(self, name):
        """Request inclusion of feature named 'name'"""

        if self.feature_is_included(name) == 0:
            descr = self.features[name].description
            raise DistutilsOptionError(
                descr + " is required, but was excluded or is not available"
            )
        self.features[name].include_in(self)
        self._set_feature(name, 1)

    def include(self, **attrs):
        """Add items to distribution that are named in keyword arguments

        For example, 'dist.include(py_modules=["x"])' would add 'x' to
        the distribution's 'py_modules' attribute, if it was not already
        there.

        Currently, this method only supports inclusion for attributes that are
        lists or tuples.  If you need to add support for adding to other
        attributes in this or a subclass, you can add an '_include_X' method,
        where 'X' is the name of the attribute.  The method will be called with
        the value passed to 'include()'.  So, 'dist.include(foo={"bar":"baz"})'
        will try to call 'dist._include_foo({"bar":"baz"})', which can then
        handle whatever special inclusion logic is needed.
        """
        for k, v in attrs.items():
            include = getattr(self, '_include_' + k, None)
            if include:
                include(v)
            else:
                self._include_misc(k, v)

    def exclude_package(self, package):
        """Remove packages, modules, and extensions in named package"""

        pfx = package + '.'
        if self.packages:
            self.packages = [
                p for p in self.packages
                if p != package and not p.startswith(pfx)
            ]

        if self.py_modules:
            self.py_modules = [
                p for p in self.py_modules
                if p != package and not p.startswith(pfx)
            ]

        if self.ext_modules:
            self.ext_modules = [
                p for p in self.ext_modules
                if p.name != package and not p.name.startswith(pfx)
            ]

    def has_contents_for(self, package):
        """Return true if 'exclude_package(package)' would do something"""

        pfx = package + '.'

        for p in self.iter_distribution_names():
            if p == package or p.startswith(pfx):
                return True

    def _exclude_misc(self, name, value):
        """Handle 'exclude()' for list/tuple attrs without a special handler"""
        if not isinstance(value, sequence):
            raise DistutilsSetupError(
                "%s: setting must be a list or tuple (%r)" % (name, value)
            )
        try:
            old = getattr(self, name)
        except AttributeError:
            raise DistutilsSetupError(
                "%s: No such distribution setting" % name
            )
        if old is not None and not isinstance(old, sequence):
            raise DistutilsSetupError(
                name + ": this setting cannot be changed via include/exclude"
            )
        elif old:
            setattr(self, name, [item for item in old if item not in value])

    def _include_misc(self, name, value):
        """Handle 'include()' for list/tuple attrs without a special handler"""

        if not isinstance(value, sequence):
            raise DistutilsSetupError(
                "%s: setting must be a list (%r)" % (name, value)
            )
        try:
            old = getattr(self, name)
        except AttributeError:
            raise DistutilsSetupError(
                "%s: No such distribution setting" % name
            )
        if old is None:
            setattr(self, name, value)
        elif not isinstance(old, sequence):
            raise DistutilsSetupError(
                name + ": this setting cannot be changed via include/exclude"
            )
        else:
            setattr(self, name, old + [item for item in value if item not in old])

    def exclude(self, **attrs):
        """Remove items from distribution that are named in keyword arguments

        For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
        the distribution's 'py_modules' attribute.  Excluding packages uses
        the 'exclude_package()' method, so all of the package's contained
        packages, modules, and extensions are also excluded.

        Currently, this method only supports exclusion from attributes that are
        lists or tuples.  If you need to add support for excluding from other
        attributes in this or a subclass, you can add an '_exclude_X' method,
        where 'X' is the name of the attribute.  The method will be called with
        the value passed to 'exclude()'.  So, 'dist.exclude(foo={"bar":"baz"})'
        will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
        handle whatever special exclusion logic is needed.
        """
        for k, v in attrs.items():
            exclude = getattr(self, '_exclude_' + k, None)
            if exclude:
                exclude(v)
            else:
                self._exclude_misc(k, v)

    def _exclude_packages(self, packages):
        if not isinstance(packages, sequence):
            raise DistutilsSetupError(
                "packages: setting must be a list or tuple (%r)" % (packages,)
            )
        list(map(self.exclude_package, packages))

    def _parse_command_opts(self, parser, args):
        # Remove --with-X/--without-X options when processing command args
        self.global_options = self.__class__.global_options
        self.negative_opt = self.__class__.negative_opt

        # First, expand any aliases
        command = args[0]
        aliases = self.get_option_dict('aliases')
        while command in aliases:
            src, alias = aliases[command]
            del aliases[command]  # ensure each alias can expand only once!
            import shlex
            args[:1] = shlex.split(alias, True)
            command = args[0]

        nargs = _Distribution._parse_command_opts(self, parser, args)

        # Handle commands that want to consume all remaining arguments
        cmd_class = self.get_command_class(command)
        if getattr(cmd_class, 'command_consumes_arguments', None):
            self.get_option_dict(command)['args'] = ("command line", nargs)
            if nargs is not None:
                return []

        return nargs

    def get_cmdline_options(self):
        """Return a '{cmd: {opt:val}}' map of all command-line options

        Option names are all long, but do not include the leading '--', and
        contain dashes rather than underscores.  If the option doesn't take
        an argument (e.g. '--quiet'), the 'val' is 'None'.

        Note that options provided by config files are intentionally excluded.
        """

        d = {}

        for cmd, opts in self.command_options.items():

            for opt, (src, val) in opts.items():

                if src != "command line":
                    continue

                opt = opt.replace('_', '-')

                if val == 0:
                    cmdobj = self.get_command_obj(cmd)
                    neg_opt = self.negative_opt.copy()
                    neg_opt.update(getattr(cmdobj, 'negative_opt', {}))
                    for neg, pos in neg_opt.items():
                        if pos == opt:
                            opt = neg
                            val = None
                            break
                    else:
                        raise AssertionError("Shouldn't be able to get here")

                elif val == 1:
                    val = None

                d.setdefault(cmd, {})[opt] = val

        return d

    def iter_distribution_names(self):
        """Yield all packages, modules, and extension names in distribution"""

        for pkg in self.packages or ():
            yield pkg

        for module in self.py_modules or ():
            yield module

        for ext in self.ext_modules or ():
            if isinstance(ext, tuple):
                name, buildinfo = ext
            else:
                name = ext.name
            if name.endswith('module'):
                name = name[:-6]
            yield name

    def handle_display_options(self, option_order):
        """If there were any non-global "display-only" options
        (--help-commands or the metadata display options) on the command
        line, display the requested info and return true; else return
        false.
        """
        import sys

        if six.PY2 or self.help_commands:
            return _Distribution.handle_display_options(self, option_order)

        # Stdout may be StringIO (e.g. in tests)
        import io
        if not isinstance(sys.stdout, io.TextIOWrapper):
            return _Distribution.handle_display_options(self, option_order)

        # Don't wrap stdout if utf-8 is already the encoding. Provides
        # workaround for #334.
        if sys.stdout.encoding.lower() in ('utf-8', 'utf8'):
            return _Distribution.handle_display_options(self, option_order)

        # Print metadata in UTF-8 no matter the platform
        encoding = sys.stdout.encoding
        errors = sys.stdout.errors
        newline = sys.platform != 'win32' and '\n' or None
        line_buffering = sys.stdout.line_buffering

        sys.stdout = io.TextIOWrapper(
            sys.stdout.detach(), 'utf-8', errors, newline, line_buffering)
        try:
            return _Distribution.handle_display_options(self, option_order)
        finally:
            sys.stdout = io.TextIOWrapper(
                sys.stdout.detach(), encoding, errors, newline, line_buffering)
class Feature:
    """
    **deprecated** -- The `Feature` facility was never completely implemented
    or supported, `has reported issues
    <https://github.com/pypa/setuptools/issues/58>`_ and will be removed in
    a future version.

    A subset of the distribution that can be excluded if unneeded/wanted

    Features are created using these keyword arguments:

    'description' -- a short, human readable description of the feature, to
       be used in error messages, and option help messages.

    'standard' -- if true, the feature is included by default if it is
       available on the current system.  Otherwise, the feature is only
       included if requested via a command line '--with-X' option, or if
       another included feature requires it.  The default setting is 'False'.

    'available' -- if true, the feature is available for installation on the
       current system.  The default setting is 'True'.

    'optional' -- if true, the feature's inclusion can be controlled from the
       command line, using the '--with-X' or '--without-X' options.  If
       false, the feature's inclusion status is determined automatically,
       based on 'available', 'standard', and whether any other feature
       requires it.  The default setting is 'True'.

    'require_features' -- a string or sequence of strings naming features
       that should also be included if this feature is included.  Defaults to
       empty list.  May also contain 'Require' objects that should be
       added/removed from the distribution.

    'remove' -- a string or list of strings naming packages to be removed
       from the distribution if this feature is *not* included.  If the
       feature *is* included, this argument is ignored.  This argument exists
       to support removing features that "crosscut" a distribution, such as
       defining a 'tests' feature that removes all the 'tests' subpackages
       provided by other features.  The default for this argument is an empty
       list.  (Note: the named package(s) or modules must exist in the base
       distribution when the 'setup()' function is initially called.)

    other keywords -- any other keyword arguments are saved, and passed to
       the distribution's 'include()' and 'exclude()' methods when the
       feature is included or excluded, respectively.  So, for example, you
       could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
       added or removed from the distribution as appropriate.

    A feature must include at least one 'requires', 'remove', or other
    keyword argument.  Otherwise, it can't affect the distribution in any way.
    Note also that you can subclass 'Feature' to create your own specialized
    feature types that modify the distribution in other ways when included or
    excluded.  See the docstrings for the various methods here for more detail.
    Aside from the methods, the only feature attributes that distributions look
    at are 'description' and 'optional'.
    """

    @staticmethod
    def warn_deprecated():
        warnings.warn(
            "Features are deprecated and will be removed in a future "
            "version. See https://github.com/pypa/setuptools/issues/65.",
            DeprecationWarning,
            stacklevel=3,
        )

    def __init__(self, description, standard=False, available=True,
                 optional=True, require_features=(), remove=(), **extras):
        self.warn_deprecated()

        self.description = description
        self.standard = standard
        self.available = available
        self.optional = optional
        if isinstance(require_features, (str, Require)):
            require_features = require_features,

        self.require_features = [
            r for r in require_features if isinstance(r, str)
        ]
        er = [r for r in require_features if not isinstance(r, str)]
        if er:
            extras['require_features'] = er

        if isinstance(remove, str):
            remove = remove,
        self.remove = remove
        self.extras = extras

        if not remove and not require_features and not extras:
            raise DistutilsSetupError(
                "Feature %s: must define 'require_features', 'remove', or at least one"
                " of 'packages', 'py_modules', etc."
            )

    def include_by_default(self):
        """Should this feature be included by default?"""
        return self.available and self.standard

    def include_in(self, dist):
        """Ensure feature and its requirements are included in distribution

        You may override this in a subclass to perform additional operations on
        the distribution.  Note that this method may be called more than once
        per feature, and so should be idempotent.

        """

        if not self.available:
            raise DistutilsPlatformError(
                self.description + " is required, "
                "but is not available on this platform"
            )

        dist.include(**self.extras)

        for f in self.require_features:
            dist.include_feature(f)

    def exclude_from(self, dist):
        """Ensure feature is excluded from distribution

        You may override this in a subclass to perform additional operations on
        the distribution.  This method will be called at most once per
        feature, and only after all included features have been asked to
        include themselves.
        """

        dist.exclude(**self.extras)

        if self.remove:
            for item in self.remove:
                dist.exclude_package(item)

    def validate(self, dist):
        """Verify that feature makes sense in context of distribution

        This method is called by the distribution just before it parses its
        command line.  It checks to ensure that the 'remove' attribute, if any,
        contains only valid package/module names that are present in the base
        distribution when 'setup()' is called.  You may override it in a
        subclass to perform any other required validation of the feature
        against a target distribution.
        """

        for item in self.remove:
            if not dist.has_contents_for(item):
                raise DistutilsSetupError(
                    "%s wants to be able to remove %s, but the distribution"
                    " doesn't contain any packages or modules under %s"
                    % (self.description, item, item)
                )
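# Illustrative sketch (editor's addition, not part of the original file):
# the check_* helpers above are registered as 'distutils.setup_keywords'
# entry points and validate raw setup() arguments, reporting problems as
# DistutilsSetupError. Standalone, hypothetical use:
if __name__ == '__main__':
    check_specifier(None, 'python_requires', '>=2.7, !=3.0.*')  # passes
    try:
        check_specifier(None, 'python_requires', 'not a specifier')
    except DistutilsSetupError as e:
        print('rejected as expected:', e)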
57
flask/lib/python3.4/site-packages/setuptools/extension.py
Normal file
@@ -0,0 +1,57 @@
import re
import functools
import distutils.core
import distutils.errors
import distutils.extension

from six.moves import map

from .monkey import get_unpatched


def _have_cython():
    """
    Return True if Cython can be imported.
    """
    cython_impl = 'Cython.Distutils.build_ext'
    try:
        # from (cython_impl) import build_ext
        __import__(cython_impl, fromlist=['build_ext']).build_ext
        return True
    except Exception:
        pass
    return False


# for compatibility
have_pyrex = _have_cython

_Extension = get_unpatched(distutils.core.Extension)


class Extension(_Extension):
    """Extension that uses '.c' files in place of '.pyx' files"""

    def __init__(self, name, sources, *args, **kw):
        # The *args is needed for compatibility as calls may use positional
        # arguments. py_limited_api may be set only via keyword.
        self.py_limited_api = kw.pop("py_limited_api", False)
        _Extension.__init__(self, name, sources, *args, **kw)

    def _convert_pyx_sources_to_lang(self):
        """
        Replace sources with .pyx extensions to sources with the target
        language extension. This mechanism allows language authors to supply
        pre-converted sources but to prefer the .pyx sources.
        """
        if _have_cython():
            # the build has Cython, so allow it to compile the .pyx files
            return
        lang = self.language or ''
        target_ext = '.cpp' if lang.lower() == 'c++' else '.c'
        sub = functools.partial(re.sub, '.pyx$', target_ext)
        self.sources = list(map(sub, self.sources))


class Library(Extension):
    """Just like a regular Extension, but built as a library instead"""
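# Illustrative sketch (editor's addition, not part of the original file):
# when Cython is absent, '.pyx' sources are rewritten to the pre-generated
# C/C++ files that a release tarball is expected to ship.
if __name__ == '__main__':
    ext = Extension('demo.speedups', ['demo/speedups.pyx'], language='c++')
    if not _have_cython():
        ext._convert_pyx_sources_to_lang()
        print(ext.sources)  # -> ['demo/speedups.cpp']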
176
flask/lib/python3.4/site-packages/setuptools/glob.py
Normal file
@@ -0,0 +1,176 @@
"""
|
||||
Filename globbing utility. Mostly a copy of `glob` from Python 3.5.
|
||||
|
||||
Changes include:
|
||||
* `yield from` and PEP3102 `*` removed.
|
||||
* `bytes` changed to `six.binary_type`.
|
||||
* Hidden files are not ignored.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import fnmatch
|
||||
from six import binary_type
|
||||
|
||||
__all__ = ["glob", "iglob", "escape"]
|
||||
|
||||
|
||||
def glob(pathname, recursive=False):
|
||||
"""Return a list of paths matching a pathname pattern.
|
||||
|
||||
The pattern may contain simple shell-style wildcards a la
|
||||
fnmatch. However, unlike fnmatch, filenames starting with a
|
||||
dot are special cases that are not matched by '*' and '?'
|
||||
patterns.
|
||||
|
||||
If recursive is true, the pattern '**' will match any files and
|
||||
zero or more directories and subdirectories.
|
||||
"""
|
||||
return list(iglob(pathname, recursive=recursive))
|
||||
|
||||
|
||||
def iglob(pathname, recursive=False):
|
||||
"""Return an iterator which yields the paths matching a pathname pattern.
|
||||
|
||||
The pattern may contain simple shell-style wildcards a la
|
||||
fnmatch. However, unlike fnmatch, filenames starting with a
|
||||
dot are special cases that are not matched by '*' and '?'
|
||||
patterns.
|
||||
|
||||
If recursive is true, the pattern '**' will match any files and
|
||||
zero or more directories and subdirectories.
|
||||
"""
|
||||
it = _iglob(pathname, recursive)
|
||||
if recursive and _isrecursive(pathname):
|
||||
s = next(it) # skip empty string
|
||||
assert not s
|
||||
return it
|
||||
|
||||
|
||||
def _iglob(pathname, recursive):
|
||||
dirname, basename = os.path.split(pathname)
|
||||
if not has_magic(pathname):
|
||||
if basename:
|
||||
if os.path.lexists(pathname):
|
||||
yield pathname
|
||||
else:
|
||||
# Patterns ending with a slash should match only directories
|
||||
if os.path.isdir(dirname):
|
||||
yield pathname
|
||||
return
|
||||
if not dirname:
|
||||
if recursive and _isrecursive(basename):
|
||||
for x in glob2(dirname, basename):
|
||||
yield x
|
||||
else:
|
||||
for x in glob1(dirname, basename):
|
||||
yield x
|
||||
return
|
||||
# `os.path.split()` returns the argument itself as a dirname if it is a
|
||||
# drive or UNC path. Prevent an infinite recursion if a drive or UNC path
|
||||
# contains magic characters (i.e. r'\\?\C:').
|
||||
if dirname != pathname and has_magic(dirname):
|
||||
dirs = _iglob(dirname, recursive)
|
||||
else:
|
||||
dirs = [dirname]
|
||||
if has_magic(basename):
|
||||
if recursive and _isrecursive(basename):
|
||||
glob_in_dir = glob2
|
||||
else:
|
||||
glob_in_dir = glob1
|
||||
else:
|
||||
glob_in_dir = glob0
|
||||
for dirname in dirs:
|
||||
for name in glob_in_dir(dirname, basename):
|
||||
yield os.path.join(dirname, name)
|
||||
|
||||
|
||||
# These 2 helper functions non-recursively glob inside a literal directory.
|
||||
# They return a list of basenames. `glob1` accepts a pattern while `glob0`
|
||||
# takes a literal basename (so it only has to check for its existence).
|
||||
|
||||
|
||||
def glob1(dirname, pattern):
|
||||
if not dirname:
|
||||
if isinstance(pattern, binary_type):
|
||||
dirname = os.curdir.encode('ASCII')
|
||||
else:
|
||||
dirname = os.curdir
|
||||
try:
|
||||
names = os.listdir(dirname)
|
||||
except OSError:
|
||||
return []
|
||||
return fnmatch.filter(names, pattern)
|
||||
|
||||
|
||||
def glob0(dirname, basename):
|
||||
if not basename:
|
||||
# `os.path.split()` returns an empty basename for paths ending with a
|
||||
# directory separator. 'q*x/' should match only directories.
|
||||
if os.path.isdir(dirname):
|
||||
return [basename]
|
||||
else:
|
||||
if os.path.lexists(os.path.join(dirname, basename)):
|
||||
return [basename]
|
||||
return []
|
||||
|
||||
|
||||
# This helper function recursively yields relative pathnames inside a literal
|
||||
# directory.
|
||||
|
||||
|
||||
def glob2(dirname, pattern):
|
||||
assert _isrecursive(pattern)
|
||||
yield pattern[:0]
|
||||
for x in _rlistdir(dirname):
|
||||
yield x
|
||||
|
||||
|
||||
# Recursively yields relative pathnames inside a literal directory.
|
||||
def _rlistdir(dirname):
|
||||
if not dirname:
|
||||
if isinstance(dirname, binary_type):
|
||||
dirname = binary_type(os.curdir, 'ASCII')
|
||||
else:
|
||||
dirname = os.curdir
|
||||
try:
|
||||
names = os.listdir(dirname)
|
||||
except os.error:
|
||||
return
|
||||
for x in names:
|
||||
yield x
|
||||
path = os.path.join(dirname, x) if dirname else x
|
||||
for y in _rlistdir(path):
|
||||
yield os.path.join(x, y)
|
||||
|
||||
|
||||
magic_check = re.compile('([*?[])')
|
||||
magic_check_bytes = re.compile(b'([*?[])')
|
||||
|
||||
|
||||
def has_magic(s):
|
||||
if isinstance(s, binary_type):
|
||||
match = magic_check_bytes.search(s)
|
||||
else:
|
||||
match = magic_check.search(s)
|
||||
return match is not None
|
||||
|
||||
|
||||
def _isrecursive(pattern):
|
||||
if isinstance(pattern, binary_type):
|
||||
return pattern == b'**'
|
||||
else:
|
||||
return pattern == '**'
|
||||
|
||||
|
||||
def escape(pathname):
|
||||
"""Escape all special characters.
|
||||
"""
|
||||
# Escaping is done by wrapping any of "*?[" between square brackets.
|
||||
# Metacharacters do not work in the drive part and shouldn't be escaped.
|
||||
drive, pathname = os.path.splitdrive(pathname)
|
||||
if isinstance(pathname, binary_type):
|
||||
pathname = magic_check_bytes.sub(br'[\1]', pathname)
|
||||
else:
|
||||
pathname = magic_check.sub(r'[\1]', pathname)
|
||||
return drive + pathname
|
||||
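# Illustrative sketch (editor's addition, not part of the original file):
# unlike the stdlib glob of the same era, this copy also matches hidden
# files, and escape() neutralizes the '*?[' metacharacters.
if __name__ == '__main__':
    print(glob('**/*.py', recursive=True))  # all .py files below os.curdir
    print(escape('what[is]this?'))  # -> 'what[[]is]this[?]'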
BIN
flask/lib/python3.4/site-packages/setuptools/gui-32.exe
Normal file
Binary file not shown.
BIN
flask/lib/python3.4/site-packages/setuptools/gui-64.exe
Normal file
Binary file not shown.
BIN
flask/lib/python3.4/site-packages/setuptools/gui.exe
Normal file
Binary file not shown.
35
flask/lib/python3.4/site-packages/setuptools/launch.py
Normal file
@@ -0,0 +1,35 @@
"""
|
||||
Launch the Python script on the command line after
|
||||
setuptools is bootstrapped via import.
|
||||
"""
|
||||
|
||||
# Note that setuptools gets imported implicitly by the
|
||||
# invocation of this script using python -m setuptools.launch
|
||||
|
||||
import tokenize
|
||||
import sys
|
||||
|
||||
|
||||
def run():
|
||||
"""
|
||||
Run the script in sys.argv[1] as if it had
|
||||
been invoked naturally.
|
||||
"""
|
||||
__builtins__
|
||||
script_name = sys.argv[1]
|
||||
namespace = dict(
|
||||
__file__=script_name,
|
||||
__name__='__main__',
|
||||
__doc__=None,
|
||||
)
|
||||
sys.argv[:] = sys.argv[1:]
|
||||
|
||||
open_ = getattr(tokenize, 'open', open)
|
||||
script = open_(script_name).read()
|
||||
norm_script = script.replace('\\r\\n', '\\n')
|
||||
code = compile(norm_script, script_name, 'exec')
|
||||
exec(code, namespace)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
run()
|
||||
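# Illustrative usage note (editor's addition, not part of the original
# file): this module is meant to be invoked as a wrapper, e.g.
#
#   python -m setuptools.launch setup.py bdist_wheel
#
# sys.argv is shifted left, so the launched script sees itself as argv[0].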
62
flask/lib/python3.4/site-packages/setuptools/lib2to3_ex.py
Normal file
@@ -0,0 +1,62 @@
"""
|
||||
Customized Mixin2to3 support:
|
||||
|
||||
- adds support for converting doctests
|
||||
|
||||
|
||||
This module raises an ImportError on Python 2.
|
||||
"""
|
||||
|
||||
from distutils.util import Mixin2to3 as _Mixin2to3
|
||||
from distutils import log
|
||||
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
|
||||
|
||||
import setuptools
|
||||
|
||||
|
||||
class DistutilsRefactoringTool(RefactoringTool):
|
||||
def log_error(self, msg, *args, **kw):
|
||||
log.error(msg, *args)
|
||||
|
||||
def log_message(self, msg, *args):
|
||||
log.info(msg, *args)
|
||||
|
||||
def log_debug(self, msg, *args):
|
||||
log.debug(msg, *args)
|
||||
|
||||
|
||||
class Mixin2to3(_Mixin2to3):
|
||||
def run_2to3(self, files, doctests=False):
|
||||
# See of the distribution option has been set, otherwise check the
|
||||
# setuptools default.
|
||||
if self.distribution.use_2to3 is not True:
|
||||
return
|
||||
if not files:
|
||||
return
|
||||
log.info("Fixing " + " ".join(files))
|
||||
self.__build_fixer_names()
|
||||
self.__exclude_fixers()
|
||||
if doctests:
|
||||
if setuptools.run_2to3_on_doctests:
|
||||
r = DistutilsRefactoringTool(self.fixer_names)
|
||||
r.refactor(files, write=True, doctests_only=True)
|
||||
else:
|
||||
_Mixin2to3.run_2to3(self, files)
|
||||
|
||||
def __build_fixer_names(self):
|
||||
if self.fixer_names:
|
||||
return
|
||||
self.fixer_names = []
|
||||
for p in setuptools.lib2to3_fixer_packages:
|
||||
self.fixer_names.extend(get_fixers_from_package(p))
|
||||
if self.distribution.use_2to3_fixers is not None:
|
||||
for p in self.distribution.use_2to3_fixers:
|
||||
self.fixer_names.extend(get_fixers_from_package(p))
|
||||
|
||||
def __exclude_fixers(self):
|
||||
excluded_fixers = getattr(self, 'exclude_fixers', [])
|
||||
if self.distribution.use_2to3_exclude_fixers is not None:
|
||||
excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers)
|
||||
for fixer_name in excluded_fixers:
|
||||
if fixer_name in self.fixer_names:
|
||||
self.fixer_names.remove(fixer_name)
|
||||
191
flask/lib/python3.4/site-packages/setuptools/monkey.py
Normal file
@@ -0,0 +1,191 @@
"""
|
||||
Monkey patching of distutils.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import distutils.filelist
|
||||
import platform
|
||||
import types
|
||||
import functools
|
||||
import inspect
|
||||
|
||||
from .py26compat import import_module
|
||||
import six
|
||||
|
||||
import setuptools
|
||||
|
||||
__all__ = []
|
||||
"""
|
||||
Everything is private. Contact the project team
|
||||
if you think you need this functionality.
|
||||
"""
|
||||
|
||||
|
||||
def get_unpatched(item):
|
||||
lookup = (
|
||||
get_unpatched_class if isinstance(item, six.class_types) else
|
||||
get_unpatched_function if isinstance(item, types.FunctionType) else
|
||||
lambda item: None
|
||||
)
|
||||
return lookup(item)
|
||||
|
||||
|
||||
def get_unpatched_class(cls):
|
||||
"""Protect against re-patching the distutils if reloaded
|
||||
|
||||
Also ensures that no other distutils extension monkeypatched the distutils
|
||||
first.
|
||||
"""
|
||||
external_bases = (
|
||||
cls
|
||||
for cls in inspect.getmro(cls)
|
||||
if not cls.__module__.startswith('setuptools')
|
||||
)
|
||||
base = next(external_bases)
|
||||
if not base.__module__.startswith('distutils'):
|
||||
msg = "distutils has already been patched by %r" % cls
|
||||
raise AssertionError(msg)
|
||||
return base
|
||||
|
||||
|
||||
def patch_all():
|
||||
# we can't patch distutils.cmd, alas
|
||||
distutils.core.Command = setuptools.Command
|
||||
|
||||
has_issue_12885 = (
|
||||
sys.version_info < (3, 4, 6)
|
||||
or
|
||||
(3, 5) < sys.version_info <= (3, 5, 3)
|
||||
or
|
||||
(3, 6) < sys.version_info
|
||||
)
|
||||
|
||||
if has_issue_12885:
|
||||
# fix findall bug in distutils (http://bugs.python.org/issue12885)
|
||||
distutils.filelist.findall = setuptools.findall
|
||||
|
||||
needs_warehouse = (
|
||||
sys.version_info < (2, 7, 13)
|
||||
or
|
||||
(3, 0) < sys.version_info < (3, 3, 7)
|
||||
or
|
||||
(3, 4) < sys.version_info < (3, 4, 6)
|
||||
or
|
||||
(3, 5) < sys.version_info <= (3, 5, 3)
|
||||
or
|
||||
(3, 6) < sys.version_info
|
||||
)
|
||||
|
||||
if needs_warehouse:
|
||||
warehouse = 'https://upload.pypi.org/legacy/'
|
||||
distutils.config.PyPIRCCommand.DEFAULT_REPOSITORY = warehouse
|
||||
|
||||
_patch_distribution_metadata_write_pkg_file()
|
||||
_patch_distribution_metadata_write_pkg_info()
|
||||
|
||||
# Install Distribution throughout the distutils
|
||||
for module in distutils.dist, distutils.core, distutils.cmd:
|
||||
module.Distribution = setuptools.dist.Distribution
|
||||
|
||||
# Install the patched Extension
|
||||
distutils.core.Extension = setuptools.extension.Extension
|
||||
distutils.extension.Extension = setuptools.extension.Extension
|
||||
if 'distutils.command.build_ext' in sys.modules:
|
||||
sys.modules['distutils.command.build_ext'].Extension = (
|
||||
setuptools.extension.Extension
|
||||
)
|
||||
|
||||
patch_for_msvc_specialized_compiler()
|
||||
|
||||
|
||||
def _patch_distribution_metadata_write_pkg_file():
|
||||
"""Patch write_pkg_file to also write Requires-Python/Requires-External"""
|
||||
distutils.dist.DistributionMetadata.write_pkg_file = (
|
||||
setuptools.dist.write_pkg_file
|
||||
)
|
||||
|
||||
|
||||
def _patch_distribution_metadata_write_pkg_info():
|
||||
"""
|
||||
Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local
|
||||
encoding to save the pkg_info. Monkey-patch its write_pkg_info method to
|
||||
correct this undesirable behavior.
|
||||
"""
|
||||
environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2)
|
||||
if not environment_local:
|
||||
return
|
||||
|
||||
distutils.dist.DistributionMetadata.write_pkg_info = (
|
||||
setuptools.dist.write_pkg_info
|
||||
)
|
||||
|
||||
|
||||
def patch_func(replacement, target_mod, func_name):
|
||||
"""
|
||||
Patch func_name in target_mod with replacement
|
||||
|
||||
Important - original must be resolved by name to avoid
|
||||
patching an already patched function.
|
||||
"""
|
||||
original = getattr(target_mod, func_name)
|
||||
|
||||
# set the 'unpatched' attribute on the replacement to
|
||||
# point to the original.
|
||||
vars(replacement).setdefault('unpatched', original)
|
||||
|
||||
# replace the function in the original module
|
||||
setattr(target_mod, func_name, replacement)
|
||||
|
||||
|
||||
def get_unpatched_function(candidate):
|
||||
return getattr(candidate, 'unpatched')
|
||||
|
||||
|
||||
def patch_for_msvc_specialized_compiler():
|
||||
"""
|
||||
Patch functions in distutils to use standalone Microsoft Visual C++
|
||||
compilers.
|
||||
"""
|
||||
# import late to avoid circular imports on Python < 3.5
|
||||
msvc = import_module('setuptools.msvc')
|
||||
|
||||
if platform.system() != 'Windows':
|
||||
# Compilers only availables on Microsoft Windows
|
||||
return
|
||||
|
||||
def patch_params(mod_name, func_name):
|
||||
"""
|
||||
Prepare the parameters for patch_func to patch indicated function.
|
||||
"""
|
||||
repl_prefix = 'msvc9_' if 'msvc9' in mod_name else 'msvc14_'
|
||||
repl_name = repl_prefix + func_name.lstrip('_')
|
||||
repl = getattr(msvc, repl_name)
|
||||
mod = import_module(mod_name)
|
||||
if not hasattr(mod, func_name):
|
||||
raise ImportError(func_name)
|
||||
return repl, mod, func_name
|
||||
|
||||
# Python 2.7 to 3.4
|
||||
msvc9 = functools.partial(patch_params, 'distutils.msvc9compiler')
|
||||
|
||||
# Python 3.5+
|
||||
msvc14 = functools.partial(patch_params, 'distutils._msvccompiler')
|
||||
|
||||
try:
|
||||
# Patch distutils.msvc9compiler
|
||||
patch_func(*msvc9('find_vcvarsall'))
|
||||
patch_func(*msvc9('query_vcvarsall'))
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
# Patch distutils._msvccompiler._get_vc_env
|
||||
patch_func(*msvc14('_get_vc_env'))
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
# Patch distutils._msvccompiler.gen_lib_options for Numpy
|
||||
patch_func(*msvc14('gen_lib_options'))
|
||||
except ImportError:
|
||||
pass
|
||||
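# Illustrative sketch (editor's addition, not part of the original file):
# patch_func() stashes the original under an 'unpatched' attribute so that
# get_unpatched_function() can recover it later.
if __name__ == '__main__':
    demo_mod = types.ModuleType('demo_mod')
    demo_mod.greet = lambda: 'original'

    def replacement():
        return 'patched'

    patch_func(replacement, demo_mod, 'greet')
    print(demo_mod.greet())                          # -> 'patched'
    print(get_unpatched_function(demo_mod.greet)())  # -> 'original'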
1193
flask/lib/python3.4/site-packages/setuptools/msvc.py
Normal file
File diff suppressed because it is too large
107
flask/lib/python3.4/site-packages/setuptools/namespaces.py
Normal file
@@ -0,0 +1,107 @@
import os
|
||||
from distutils import log
|
||||
import itertools
|
||||
|
||||
from six.moves import map
|
||||
|
||||
|
||||
flatten = itertools.chain.from_iterable
|
||||
|
||||
|
||||
class Installer:
|
||||
|
||||
nspkg_ext = '-nspkg.pth'
|
||||
|
||||
def install_namespaces(self):
|
||||
nsp = self._get_all_ns_packages()
|
||||
if not nsp:
|
||||
return
|
||||
filename, ext = os.path.splitext(self._get_target())
|
||||
filename += self.nspkg_ext
|
||||
self.outputs.append(filename)
|
||||
log.info("Installing %s", filename)
|
||||
lines = map(self._gen_nspkg_line, nsp)
|
||||
|
||||
if self.dry_run:
|
||||
# always generate the lines, even in dry run
|
||||
list(lines)
|
||||
return
|
||||
|
||||
with open(filename, 'wt') as f:
|
||||
f.writelines(lines)
|
||||
|
||||
def uninstall_namespaces(self):
|
||||
filename, ext = os.path.splitext(self._get_target())
|
||||
filename += self.nspkg_ext
|
||||
if not os.path.exists(filename):
|
||||
return
|
||||
log.info("Removing %s", filename)
|
||||
os.remove(filename)
|
||||
|
||||
def _get_target(self):
|
||||
return self.target
|
||||
|
||||
_nspkg_tmpl = (
|
||||
"import sys, types, os",
|
||||
"has_mfs = sys.version_info > (3, 5)",
|
||||
"p = os.path.join(%(root)s, *%(pth)r)",
|
||||
"importlib = has_mfs and __import__('importlib.util')",
|
||||
"has_mfs and __import__('importlib.machinery')",
|
||||
"m = has_mfs and "
|
||||
"sys.modules.setdefault(%(pkg)r, "
|
||||
"importlib.util.module_from_spec("
|
||||
"importlib.machinery.PathFinder.find_spec(%(pkg)r, "
|
||||
"[os.path.dirname(p)])))",
|
||||
"m = m or not has_mfs and "
|
||||
"sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
|
||||
"mp = (m or []) and m.__dict__.setdefault('__path__',[])",
|
||||
"(p not in mp) and mp.append(p)",
|
||||
)
|
||||
"lines for the namespace installer"
|
||||
|
||||
_nspkg_tmpl_multi = (
|
||||
'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
|
||||
)
|
||||
"additional line(s) when a parent package is indicated"
|
||||
|
||||
def _get_root(self):
|
||||
return "sys._getframe(1).f_locals['sitedir']"
|
||||
|
||||
def _gen_nspkg_line(self, pkg):
|
||||
# ensure pkg is not a unicode string under Python 2.7
|
||||
pkg = str(pkg)
|
||||
pth = tuple(pkg.split('.'))
|
||||
root = self._get_root()
|
||||
tmpl_lines = self._nspkg_tmpl
|
||||
parent, sep, child = pkg.rpartition('.')
|
||||
if parent:
|
||||
tmpl_lines += self._nspkg_tmpl_multi
|
||||
return ';'.join(tmpl_lines) % locals() + '\n'
|
||||
|
||||
def _get_all_ns_packages(self):
|
||||
"""Return sorted list of all package namespaces"""
|
||||
pkgs = self.distribution.namespace_packages or []
|
||||
return sorted(flatten(map(self._pkg_names, pkgs)))
|
||||
|
||||
@staticmethod
|
||||
def _pkg_names(pkg):
|
||||
"""
|
||||
Given a namespace package, yield the components of that
|
||||
package.
|
||||
|
||||
>>> names = Installer._pkg_names('a.b.c')
|
||||
>>> set(names) == set(['a', 'a.b', 'a.b.c'])
|
||||
True
|
||||
"""
|
||||
parts = pkg.split('.')
|
||||
while parts:
|
||||
yield '.'.join(parts)
|
||||
parts.pop()
|
||||
|
||||
|
||||
class DevelopInstaller(Installer):
|
||||
def _get_root(self):
|
||||
return repr(str(self.egg_path))
|
||||
|
||||
def _get_target(self):
|
||||
return self.egg_link
|
||||
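To make the template machinery above concrete: each declared namespace package becomes one semicolon-joined line in the generated *-nspkg.pth file, where %(root)s is substituted as a bare Python expression while %(pth)r and %(pkg)r are substituted as literals via repr. A rough standalone sketch of that joining step; the shortened template and the 'demo' package name are illustrative only.

# Sketch: assembling one -nspkg.pth line, assuming a shortened template.
tmpl = (
    "import sys, types, os",
    "p = os.path.join(%(root)s, *%(pth)r)",
    "m = sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
    "mp = m.__dict__.setdefault('__path__', [])",
    "(p not in mp) and mp.append(p)",
)

pkg = 'demo'
pth = tuple(pkg.split('.'))
# root is itself an expression, evaluated when site.py executes the .pth line:
root = "sys._getframe(1).f_locals['sitedir']"
line = ';'.join(tmpl) % locals() + '\n'
print(line)

The repr substitution (%(pkg)r) is what guarantees the generated line is valid Python regardless of how the package name would need to be quoted.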
1115
flask/lib/python3.4/site-packages/setuptools/package_index.py
Normal file
File diff suppressed because it is too large
31
flask/lib/python3.4/site-packages/setuptools/py26compat.py
Normal file
@@ -0,0 +1,31 @@
"""
Compatibility Support for Python 2.6 and earlier
"""

import sys

try:
    from urllib.parse import splittag
except ImportError:
    from urllib import splittag


def strip_fragment(url):
    """
    In `Python issue 8280 <http://bugs.python.org/issue8280>`_, Python 2.7
    and later was patched to disregard the fragment when making URL requests.
    Do the same for Python 2.6 and earlier.
    """
    url, fragment = splittag(url)
    return url


if sys.version_info >= (2, 7):
    strip_fragment = lambda x: x

try:
    from importlib import import_module
except ImportError:

    def import_module(module_name):
        return __import__(module_name, fromlist=['__name__'])
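A quick usage sketch of the shim above, assuming the Python 2.6-style code path where strip_fragment is not replaced by the identity lambda; the URL and hash are made up.

try:
    from urllib.parse import splittag  # Python 3
except ImportError:
    from urllib import splittag        # Python 2


def strip_fragment(url):
    # Drop the '#...' fragment before issuing the request.
    url, fragment = splittag(url)
    return url


print(strip_fragment('https://example.com/pkg-1.0.tar.gz#md5=d41d8cd98f'))
# -> https://example.com/pkg-1.0.tar.gz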
29
flask/lib/python3.4/site-packages/setuptools/py27compat.py
Normal file
@@ -0,0 +1,29 @@
"""
Compatibility Support for Python 2.7 and earlier
"""

import sys
import platform


def get_all_headers(message, key):
    """
    Given an HTTPMessage, return all headers matching a given key.
    """
    return message.get_all(key)


if sys.version_info < (3,):

    def get_all_headers(message, key):
        return message.getheaders(key)


linux_py2_ascii = (
    platform.system() == 'Linux' and
    sys.getfilesystemencoding() == 'ascii' and
    sys.version_info < (3,)
)

rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""
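For illustration, the same get_all_headers call works against any email.message.Message-style object, which is what HTTPMessage is on Python 3; the header values below are made up.

import email


def get_all_headers(message, key):
    # Python 3 spelling; Python 2 would use message.getheaders(key).
    return message.get_all(key)


msg = email.message_from_string(
    'Set-Cookie: a=1\r\nSet-Cookie: b=2\r\nContent-Type: text/html\r\n\r\n'
)
print(get_all_headers(msg, 'Set-Cookie'))  # ['a=1', 'b=2']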
56
flask/lib/python3.4/site-packages/setuptools/py31compat.py
Normal file
@@ -0,0 +1,56 @@
import sys
import unittest

__all__ = ['get_config_vars', 'get_path']

try:
    # Python 2.7 or >=3.2
    from sysconfig import get_config_vars, get_path
except ImportError:
    from distutils.sysconfig import get_config_vars, get_python_lib

    def get_path(name):
        if name not in ('platlib', 'purelib'):
            raise ValueError("Name must be purelib or platlib")
        return get_python_lib(name == 'platlib')


try:
    # Python >=3.2
    from tempfile import TemporaryDirectory
except ImportError:
    import shutil
    import tempfile

    class TemporaryDirectory(object):
        """
        Very simple temporary directory context manager.
        Will try to delete afterward, but will also ignore OS and similar
        errors on deletion.
        """

        def __init__(self):
            self.name = None  # Handle mkdtemp raising an exception
            self.name = tempfile.mkdtemp()

        def __enter__(self):
            return self.name

        def __exit__(self, exctype, excvalue, exctrace):
            try:
                shutil.rmtree(self.name, True)
            except OSError:  # removal errors are not the only possible
                pass
            self.name = None


unittest_main = unittest.main

_PY31 = (3, 1) <= sys.version_info[:2] < (3, 2)
if _PY31:
    # on Python 3.1, translate testRunner==None to TextTestRunner
    # for compatibility with Python 2.6, 2.7, and 3.2+
    def unittest_main(*args, **kwargs):
        if 'testRunner' in kwargs and kwargs['testRunner'] is None:
            kwargs['testRunner'] = unittest.TextTestRunner
        return unittest.main(*args, **kwargs)
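Either version of TemporaryDirectory is used the same way; a small usage sketch (the file name is arbitrary):

import os

with TemporaryDirectory() as tmp:
    scratch = os.path.join(tmp, 'scratch.txt')
    with open(scratch, 'w') as f:
        f.write('hello')
    print(os.path.exists(scratch))  # True inside the context
# On exit the directory is removed; deletion errors are swallowed
# by the fallback implementation.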
45
flask/lib/python3.4/site-packages/setuptools/py33compat.py
Normal file
@@ -0,0 +1,45 @@
import dis
import array
import collections

import six


OpArg = collections.namedtuple('OpArg', 'opcode arg')


class Bytecode_compat(object):
    def __init__(self, code):
        self.code = code

    def __iter__(self):
        """Yield '(op,arg)' pair for each operation in code object 'code'"""

        bytes = array.array('b', self.code.co_code)
        eof = len(self.code.co_code)

        ptr = 0
        extended_arg = 0

        while ptr < eof:

            op = bytes[ptr]

            if op >= dis.HAVE_ARGUMENT:

                arg = bytes[ptr + 1] + bytes[ptr + 2] * 256 + extended_arg
                ptr += 3

                if op == dis.EXTENDED_ARG:
                    long_type = six.integer_types[-1]
                    extended_arg = arg * long_type(65536)
                    continue

            else:
                arg = None
                ptr += 1

            yield OpArg(op, arg)


Bytecode = getattr(dis, 'Bytecode', Bytecode_compat)
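Note that the shim's iterator only understands the pre-3.6 bytecode layout (a one-byte opcode followed by an optional two-byte argument); on Python 3.4+, dis.Bytecode exists and is used instead, yielding Instruction objects rather than OpArg pairs. A tiny usage sketch:

import dis


def sample():
    return 1 + 2


# On modern Pythons this is dis.Bytecode; on older ones, Bytecode_compat.
for item in Bytecode(sample.__code__):
    print(item)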
82
flask/lib/python3.4/site-packages/setuptools/py36compat.py
Normal file
@@ -0,0 +1,82 @@
import sys
from distutils.errors import DistutilsOptionError
from distutils.util import strtobool
from distutils.debug import DEBUG


class Distribution_parse_config_files:
    """
    Mix-in providing forward-compatibility for functionality to be
    included by default on Python 3.7.

    Do not edit the code in this class except to update functionality
    as implemented in distutils.
    """
    def parse_config_files(self, filenames=None):
        from configparser import ConfigParser

        # Ignore install directory options if we have a venv
        if sys.prefix != sys.base_prefix:
            ignore_options = [
                'install-base', 'install-platbase', 'install-lib',
                'install-platlib', 'install-purelib', 'install-headers',
                'install-scripts', 'install-data', 'prefix', 'exec-prefix',
                'home', 'user', 'root']
        else:
            ignore_options = []

        ignore_options = frozenset(ignore_options)

        if filenames is None:
            filenames = self.find_config_files()

        if DEBUG:
            self.announce("Distribution.parse_config_files():")

        parser = ConfigParser(interpolation=None)
        for filename in filenames:
            if DEBUG:
                self.announce("  reading %s" % filename)
            parser.read(filename)
            for section in parser.sections():
                options = parser.options(section)
                opt_dict = self.get_option_dict(section)

                for opt in options:
                    if opt != '__name__' and opt not in ignore_options:
                        val = parser.get(section, opt)
                        opt = opt.replace('-', '_')
                        opt_dict[opt] = (filename, val)

            # Make the ConfigParser forget everything (so we retain
            # the original filenames that options come from)
            parser.__init__()

        # If there was a "global" section in the config file, use it
        # to set Distribution options.

        if 'global' in self.command_options:
            for (opt, (src, val)) in self.command_options['global'].items():
                alias = self.negative_opt.get(opt)
                try:
                    if alias:
                        setattr(self, alias, not strtobool(val))
                    elif opt in ('verbose', 'dry_run'):  # ugh!
                        setattr(self, opt, strtobool(val))
                    else:
                        setattr(self, opt, val)
                except ValueError as msg:
                    raise DistutilsOptionError(msg)


if sys.version_info < (3,):
    # Python 2 behavior is sufficient
    class Distribution_parse_config_files:
        pass


if False:
    # When updated behavior is available upstream,
    # disable override here.
    class Distribution_parse_config_files:
        pass
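A rough illustration of the core loop above: options read from a config file are normalized (hyphens to underscores) and stored alongside the filename they came from. The section content below is invented for the example.

from configparser import ConfigParser

parser = ConfigParser(interpolation=None)
parser.read_string("[metadata]\nname = demo\nlong-description = file: README\n")

opt_dict = {}
for section in parser.sections():
    for opt in parser.options(section):
        if opt != '__name__':
            val = parser.get(section, opt)
            opt_dict[opt.replace('-', '_')] = ('setup.cfg', val)

print(opt_dict)
# {'name': ('setup.cfg', 'demo'), 'long_description': ('setup.cfg', 'file: README')}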
491
flask/lib/python3.4/site-packages/setuptools/sandbox.py
Normal file
@@ -0,0 +1,491 @@
import os
import sys
import tempfile
import operator
import functools
import itertools
import re
import contextlib
import pickle

import six
from six.moves import builtins, map

import pkg_resources

if sys.platform.startswith('java'):
    import org.python.modules.posix.PosixModule as _os
else:
    _os = sys.modules[os.name]
try:
    _file = file
except NameError:
    _file = None
_open = open
from distutils.errors import DistutilsError
from pkg_resources import working_set

__all__ = [
    "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
]


def _execfile(filename, globals, locals=None):
    """
    Python 3 implementation of execfile.
    """
    mode = 'rb'
    with open(filename, mode) as stream:
        script = stream.read()
    # compile() function in Python 2.6 and 3.1 requires LF line endings.
    if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 2):
        script = script.replace(b'\r\n', b'\n')
        script = script.replace(b'\r', b'\n')
    if locals is None:
        locals = globals
    code = compile(script, filename, 'exec')
    exec(code, globals, locals)


@contextlib.contextmanager
def save_argv(repl=None):
    saved = sys.argv[:]
    if repl is not None:
        sys.argv[:] = repl
    try:
        yield saved
    finally:
        sys.argv[:] = saved


@contextlib.contextmanager
def save_path():
    saved = sys.path[:]
    try:
        yield saved
    finally:
        sys.path[:] = saved


@contextlib.contextmanager
def override_temp(replacement):
    """
    Monkey-patch tempfile.tempdir with replacement, ensuring it exists
    """
    if not os.path.isdir(replacement):
        os.makedirs(replacement)

    saved = tempfile.tempdir

    tempfile.tempdir = replacement

    try:
        yield
    finally:
        tempfile.tempdir = saved


@contextlib.contextmanager
def pushd(target):
    saved = os.getcwd()
    os.chdir(target)
    try:
        yield saved
    finally:
        os.chdir(saved)
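These helpers compose with ordinary with statements; each restores the global it touched even if the body raises. A small sketch combining two of them (the argv values and target directory are arbitrary):

import sys
import tempfile

with save_argv(['setup.py', 'install']):
    with pushd(tempfile.gettempdir()):
        print(sys.argv)  # ['setup.py', 'install'] inside the context
# sys.argv and the working directory are restored here.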
class UnpickleableException(Exception):
    """
    An exception representing another Exception that could not be pickled.
    """

    @staticmethod
    def dump(type, exc):
        """
        Always return a dumped (pickled) type and exc. If exc can't be pickled,
        wrap it in UnpickleableException first.
        """
        try:
            return pickle.dumps(type), pickle.dumps(exc)
        except Exception:
            # get UnpickleableException inside the sandbox
            from setuptools.sandbox import UnpickleableException as cls
            return cls.dump(cls, cls(repr(exc)))


class ExceptionSaver:
    """
    A Context Manager that will save an exception, serialized, and restore it
    later.
    """

    def __enter__(self):
        return self

    def __exit__(self, type, exc, tb):
        if not exc:
            return

        # dump the exception
        self._saved = UnpickleableException.dump(type, exc)
        self._tb = tb

        # suppress the exception
        return True

    def resume(self):
        "restore and re-raise any exception"

        if '_saved' not in vars(self):
            return

        type, exc = map(pickle.loads, self._saved)
        six.reraise(type, exc, self._tb)


@contextlib.contextmanager
def save_modules():
    """
    Context in which imported modules are saved.

    Translates exceptions internal to the context into the equivalent exception
    outside the context.
    """
    saved = sys.modules.copy()
    with ExceptionSaver() as saved_exc:
        yield saved

    sys.modules.update(saved)
    # remove any modules imported since
    del_modules = (
        mod_name for mod_name in sys.modules
        if mod_name not in saved
        # exclude any encodings modules. See #285
        and not mod_name.startswith('encodings.')
    )
    _clear_modules(del_modules)

    saved_exc.resume()


def _clear_modules(module_names):
    for mod_name in list(module_names):
        del sys.modules[mod_name]
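The pickle round-trip above lets an exception raised under one module state be re-raised after save_modules has restored the original state. A minimal sketch of the save/resume cycle:

saver = ExceptionSaver()
with saver:
    raise ValueError('raised inside the context')

# The exception was suppressed and serialized; resume() re-raises it.
try:
    saver.resume()
except ValueError as exc:
    print(exc)  # raised inside the context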
@contextlib.contextmanager
|
||||
def save_pkg_resources_state():
|
||||
saved = pkg_resources.__getstate__()
|
||||
try:
|
||||
yield saved
|
||||
finally:
|
||||
pkg_resources.__setstate__(saved)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def setup_context(setup_dir):
|
||||
temp_dir = os.path.join(setup_dir, 'temp')
|
||||
with save_pkg_resources_state():
|
||||
with save_modules():
|
||||
hide_setuptools()
|
||||
with save_path():
|
||||
with save_argv():
|
||||
with override_temp(temp_dir):
|
||||
with pushd(setup_dir):
|
||||
# ensure setuptools commands are available
|
||||
__import__('setuptools')
|
||||
yield
|
||||
|
||||
|
||||
def _needs_hiding(mod_name):
    """
    >>> _needs_hiding('setuptools')
    True
    >>> _needs_hiding('pkg_resources')
    True
    >>> _needs_hiding('setuptools_plugin')
    False
    >>> _needs_hiding('setuptools.__init__')
    True
    >>> _needs_hiding('distutils')
    True
    >>> _needs_hiding('os')
    False
    >>> _needs_hiding('Cython')
    True
    """
    pattern = re.compile(r'(setuptools|pkg_resources|distutils|Cython)(\.|$)')
    return bool(pattern.match(mod_name))


def hide_setuptools():
    """
    Remove references to setuptools' modules from sys.modules to allow the
    invocation to import the most appropriate setuptools. This technique is
    necessary to avoid issues such as #315 where setuptools upgrading itself
    would fail to find a function declared in the metadata.
    """
    modules = filter(_needs_hiding, sys.modules)
    _clear_modules(modules)
def run_setup(setup_script, args):
    """Run a distutils setup script, sandboxed in its directory"""
    setup_dir = os.path.abspath(os.path.dirname(setup_script))
    with setup_context(setup_dir):
        try:
            sys.argv[:] = [setup_script] + list(args)
            sys.path.insert(0, setup_dir)
            # reset to include setup dir, w/clean callback list
            working_set.__init__()
            working_set.callbacks.append(lambda dist: dist.activate())

            # __file__ should be a byte string on Python 2 (#712)
            dunder_file = (
                setup_script
                if isinstance(setup_script, str) else
                setup_script.encode(sys.getfilesystemencoding())
            )

            def runner():
                ns = dict(__file__=dunder_file, __name__='__main__')
                _execfile(setup_script, ns)

            DirectorySandbox(setup_dir).run(runner)
        except SystemExit as v:
            if v.args and v.args[0]:
                raise
            # Normal exit, just return
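Typical use is to point run_setup at a third-party setup.py and pass the command-line arguments it should see; the path below is hypothetical.

# Hypothetical invocation: runs ./vendor/pkg/setup.py under the sandbox,
# with sys.argv set to the script path followed by 'egg_info'.
run_setup('vendor/pkg/setup.py', ['egg_info'])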
class AbstractSandbox:
    """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""

    _active = False

    def __init__(self):
        self._attrs = [
            name for name in dir(_os)
            if not name.startswith('_') and hasattr(self, name)
        ]

    def _copy(self, source):
        for name in self._attrs:
            setattr(os, name, getattr(source, name))

    def run(self, func):
        """Run 'func' under os sandboxing"""
        try:
            self._copy(self)
            if _file:
                builtins.file = self._file
            builtins.open = self._open
            self._active = True
            return func()
        finally:
            self._active = False
            if _file:
                builtins.file = _file
            builtins.open = _open
            self._copy(_os)

    def _mk_dual_path_wrapper(name):
        original = getattr(_os, name)

        def wrap(self, src, dst, *args, **kw):
            if self._active:
                src, dst = self._remap_pair(name, src, dst, *args, **kw)
            return original(src, dst, *args, **kw)

        return wrap

    for name in ["rename", "link", "symlink"]:
        if hasattr(_os, name):
            locals()[name] = _mk_dual_path_wrapper(name)

    def _mk_single_path_wrapper(name, original=None):
        original = original or getattr(_os, name)

        def wrap(self, path, *args, **kw):
            if self._active:
                path = self._remap_input(name, path, *args, **kw)
            return original(path, *args, **kw)

        return wrap

    if _file:
        _file = _mk_single_path_wrapper('file', _file)
    _open = _mk_single_path_wrapper('open', _open)
    for name in [
        "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
        "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
        "startfile", "mkfifo", "mknod", "pathconf", "access"
    ]:
        if hasattr(_os, name):
            locals()[name] = _mk_single_path_wrapper(name)

    def _mk_single_with_return(name):
        original = getattr(_os, name)

        def wrap(self, path, *args, **kw):
            if self._active:
                path = self._remap_input(name, path, *args, **kw)
                return self._remap_output(name, original(path, *args, **kw))
            return original(path, *args, **kw)

        return wrap

    for name in ['readlink', 'tempnam']:
        if hasattr(_os, name):
            locals()[name] = _mk_single_with_return(name)

    def _mk_query(name):
        original = getattr(_os, name)

        def wrap(self, *args, **kw):
            retval = original(*args, **kw)
            if self._active:
                return self._remap_output(name, retval)
            return retval

        return wrap

    for name in ['getcwd', 'tmpnam']:
        if hasattr(_os, name):
            locals()[name] = _mk_query(name)

    def _validate_path(self, path):
        """Called to remap or validate any path, whether input or output"""
        return path

    def _remap_input(self, operation, path, *args, **kw):
        """Called for path inputs"""
        return self._validate_path(path)

    def _remap_output(self, operation, path):
        """Called for path outputs"""
        return self._validate_path(path)

    def _remap_pair(self, operation, src, dst, *args, **kw):
        """Called for path pairs like rename, link, and symlink operations"""
        return (
            self._remap_input(operation + '-from', src, *args, **kw),
            self._remap_input(operation + '-to', dst, *args, **kw)
        )


if hasattr(os, 'devnull'):
    _EXCEPTIONS = [os.devnull,]
else:
    _EXCEPTIONS = []
class DirectorySandbox(AbstractSandbox):
    """Restrict operations to a single subdirectory - pseudo-chroot"""

    write_ops = dict.fromkeys([
        "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
        "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
    ])

    _exception_patterns = [
        # Allow lib2to3 to attempt to save a pickled grammar object (#121)
        r'.*lib2to3.*\.pickle$',
    ]
    "exempt writing to paths that match the pattern"

    def __init__(self, sandbox, exceptions=_EXCEPTIONS):
        self._sandbox = os.path.normcase(os.path.realpath(sandbox))
        self._prefix = os.path.join(self._sandbox, '')
        self._exceptions = [
            os.path.normcase(os.path.realpath(path))
            for path in exceptions
        ]
        AbstractSandbox.__init__(self)

    def _violation(self, operation, *args, **kw):
        from setuptools.sandbox import SandboxViolation
        raise SandboxViolation(operation, args, kw)

    if _file:

        def _file(self, path, mode='r', *args, **kw):
            if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
                self._violation("file", path, mode, *args, **kw)
            return _file(path, mode, *args, **kw)

    def _open(self, path, mode='r', *args, **kw):
        if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
            self._violation("open", path, mode, *args, **kw)
        return _open(path, mode, *args, **kw)

    def tmpnam(self):
        self._violation("tmpnam")

    def _ok(self, path):
        active = self._active
        try:
            self._active = False
            realpath = os.path.normcase(os.path.realpath(path))
            return (
                self._exempted(realpath)
                or realpath == self._sandbox
                or realpath.startswith(self._prefix)
            )
        finally:
            self._active = active

    def _exempted(self, filepath):
        start_matches = (
            filepath.startswith(exception)
            for exception in self._exceptions
        )
        pattern_matches = (
            re.match(pattern, filepath)
            for pattern in self._exception_patterns
        )
        candidates = itertools.chain(start_matches, pattern_matches)
        return any(candidates)

    def _remap_input(self, operation, path, *args, **kw):
        """Called for path inputs"""
        if operation in self.write_ops and not self._ok(path):
            self._violation(operation, os.path.realpath(path), *args, **kw)
        return path

    def _remap_pair(self, operation, src, dst, *args, **kw):
        """Called for path pairs like rename, link, and symlink operations"""
        if not self._ok(src) or not self._ok(dst):
            self._violation(operation, src, dst, *args, **kw)
        return (src, dst)

    def open(self, file, flags, mode=0o777, *args, **kw):
        """Called for low-level os.open()"""
        if flags & WRITE_FLAGS and not self._ok(file):
            self._violation("os.open", file, flags, mode, *args, **kw)
        return _os.open(file, flags, mode, *args, **kw)


WRITE_FLAGS = functools.reduce(
    operator.or_, [getattr(_os, a, 0) for a in
        "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
)
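Putting the sandbox together: writes inside the sandbox directory succeed, while writes outside it raise SandboxViolation (defined just below). A small sketch; the paths are illustrative.

import os
import tempfile

box_dir = tempfile.mkdtemp()
sandbox = DirectorySandbox(box_dir)


def task():
    # Allowed: inside the sandbox tree.
    with open(os.path.join(box_dir, 'ok.txt'), 'w') as f:
        f.write('fine')
    # Forbidden: outside the tree; raises SandboxViolation.
    open('/tmp/outside.txt', 'w')


try:
    sandbox.run(task)
except Exception as exc:
    print(exc.__class__.__name__)  # SandboxViolation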
class SandboxViolation(DistutilsError):
    """A setup script attempted to modify the filesystem outside the sandbox"""

    def __str__(self):
        return """SandboxViolation: %s%r %s

The package setup script has attempted to modify files on your system
that are not within the EasyInstall build area, and has been aborted.

This package cannot be safely installed by EasyInstall, and may not
support alternate installation locations even if you run its setup
script by hand. Please inform the package's author and the EasyInstall
maintainers to find out if a fix or workaround is available.""" % self.args


#
Some files were not shown because too many files have changed in this diff