mirror of
https://github.com/KevinMidboe/Node-Com-Handler.git
synced 2025-10-29 17:50:27 +00:00
Started to create a small FLASK restful api for handling requests
This commit is contained in:
@@ -0,0 +1,86 @@
|
||||
"""
|
||||
Package containing all pip commands
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
from pip.commands.completion import CompletionCommand
|
||||
from pip.commands.download import DownloadCommand
|
||||
from pip.commands.freeze import FreezeCommand
|
||||
from pip.commands.hash import HashCommand
|
||||
from pip.commands.help import HelpCommand
|
||||
from pip.commands.list import ListCommand
|
||||
from pip.commands.check import CheckCommand
|
||||
from pip.commands.search import SearchCommand
|
||||
from pip.commands.show import ShowCommand
|
||||
from pip.commands.install import InstallCommand
|
||||
from pip.commands.uninstall import UninstallCommand
|
||||
from pip.commands.wheel import WheelCommand
|
||||
|
||||
|
||||
commands_dict = {
|
||||
CompletionCommand.name: CompletionCommand,
|
||||
FreezeCommand.name: FreezeCommand,
|
||||
HashCommand.name: HashCommand,
|
||||
HelpCommand.name: HelpCommand,
|
||||
SearchCommand.name: SearchCommand,
|
||||
ShowCommand.name: ShowCommand,
|
||||
InstallCommand.name: InstallCommand,
|
||||
UninstallCommand.name: UninstallCommand,
|
||||
DownloadCommand.name: DownloadCommand,
|
||||
ListCommand.name: ListCommand,
|
||||
CheckCommand.name: CheckCommand,
|
||||
WheelCommand.name: WheelCommand,
|
||||
}
|
||||
|
||||
|
||||
commands_order = [
|
||||
InstallCommand,
|
||||
DownloadCommand,
|
||||
UninstallCommand,
|
||||
FreezeCommand,
|
||||
ListCommand,
|
||||
ShowCommand,
|
||||
CheckCommand,
|
||||
SearchCommand,
|
||||
WheelCommand,
|
||||
HashCommand,
|
||||
CompletionCommand,
|
||||
HelpCommand,
|
||||
]
|
||||
|
||||
|
||||
def get_summaries(ordered=True):
|
||||
"""Yields sorted (command name, command summary) tuples."""
|
||||
|
||||
if ordered:
|
||||
cmditems = _sort_commands(commands_dict, commands_order)
|
||||
else:
|
||||
cmditems = commands_dict.items()
|
||||
|
||||
for name, command_class in cmditems:
|
||||
yield (name, command_class.summary)
|
||||
|
||||
|
||||
def get_similar_commands(name):
|
||||
"""Command name auto-correct."""
|
||||
from difflib import get_close_matches
|
||||
|
||||
name = name.lower()
|
||||
|
||||
close_commands = get_close_matches(name, commands_dict.keys())
|
||||
|
||||
if close_commands:
|
||||
return close_commands[0]
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def _sort_commands(cmddict, order):
|
||||
def keyfn(key):
|
||||
try:
|
||||
return order.index(key[1])
|
||||
except ValueError:
|
||||
# unordered items should come last
|
||||
return 0xff
|
||||
|
||||
return sorted(cmddict.items(), key=keyfn)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
39
v1/flask/lib/python3.4/site-packages/pip/commands/check.py
Normal file
39
v1/flask/lib/python3.4/site-packages/pip/commands/check.py
Normal file
@@ -0,0 +1,39 @@
|
||||
import logging
|
||||
|
||||
from pip.basecommand import Command
|
||||
from pip.operations.check import check_requirements
|
||||
from pip.utils import get_installed_distributions
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CheckCommand(Command):
|
||||
"""Verify installed packages have compatible dependencies."""
|
||||
name = 'check'
|
||||
usage = """
|
||||
%prog [options]"""
|
||||
summary = 'Verify installed packages have compatible dependencies.'
|
||||
|
||||
def run(self, options, args):
|
||||
dists = get_installed_distributions(local_only=False, skip=())
|
||||
missing_reqs_dict, incompatible_reqs_dict = check_requirements(dists)
|
||||
|
||||
for dist in dists:
|
||||
key = '%s==%s' % (dist.project_name, dist.version)
|
||||
|
||||
for requirement in missing_reqs_dict.get(key, []):
|
||||
logger.info(
|
||||
"%s %s requires %s, which is not installed.",
|
||||
dist.project_name, dist.version, requirement.project_name)
|
||||
|
||||
for requirement, actual in incompatible_reqs_dict.get(key, []):
|
||||
logger.info(
|
||||
"%s %s has requirement %s, but you have %s %s.",
|
||||
dist.project_name, dist.version, requirement,
|
||||
actual.project_name, actual.version)
|
||||
|
||||
if missing_reqs_dict or incompatible_reqs_dict:
|
||||
return 1
|
||||
else:
|
||||
logger.info("No broken requirements found.")
|
||||
@@ -0,0 +1,81 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import sys
|
||||
from pip.basecommand import Command
|
||||
|
||||
BASE_COMPLETION = """
|
||||
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
|
||||
"""
|
||||
|
||||
COMPLETION_SCRIPTS = {
|
||||
'bash': """
|
||||
_pip_completion()
|
||||
{
|
||||
COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
|
||||
COMP_CWORD=$COMP_CWORD \\
|
||||
PIP_AUTO_COMPLETE=1 $1 ) )
|
||||
}
|
||||
complete -o default -F _pip_completion pip
|
||||
""", 'zsh': """
|
||||
function _pip_completion {
|
||||
local words cword
|
||||
read -Ac words
|
||||
read -cn cword
|
||||
reply=( $( COMP_WORDS="$words[*]" \\
|
||||
COMP_CWORD=$(( cword-1 )) \\
|
||||
PIP_AUTO_COMPLETE=1 $words[1] ) )
|
||||
}
|
||||
compctl -K _pip_completion pip
|
||||
""", 'fish': """
|
||||
function __fish_complete_pip
|
||||
set -lx COMP_WORDS (commandline -o) ""
|
||||
set -lx COMP_CWORD (math (contains -i -- (commandline -t) $COMP_WORDS)-1)
|
||||
set -lx PIP_AUTO_COMPLETE 1
|
||||
string split \ -- (eval $COMP_WORDS[1])
|
||||
end
|
||||
complete -fa "(__fish_complete_pip)" -c pip
|
||||
"""}
|
||||
|
||||
|
||||
class CompletionCommand(Command):
|
||||
"""A helper command to be used for command completion."""
|
||||
name = 'completion'
|
||||
summary = 'A helper command used for command completion.'
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
super(CompletionCommand, self).__init__(*args, **kw)
|
||||
|
||||
cmd_opts = self.cmd_opts
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--bash', '-b',
|
||||
action='store_const',
|
||||
const='bash',
|
||||
dest='shell',
|
||||
help='Emit completion code for bash')
|
||||
cmd_opts.add_option(
|
||||
'--zsh', '-z',
|
||||
action='store_const',
|
||||
const='zsh',
|
||||
dest='shell',
|
||||
help='Emit completion code for zsh')
|
||||
cmd_opts.add_option(
|
||||
'--fish', '-f',
|
||||
action='store_const',
|
||||
const='fish',
|
||||
dest='shell',
|
||||
help='Emit completion code for fish')
|
||||
|
||||
self.parser.insert_option_group(0, cmd_opts)
|
||||
|
||||
def run(self, options, args):
|
||||
"""Prints the completion code of the given shell"""
|
||||
shells = COMPLETION_SCRIPTS.keys()
|
||||
shell_options = ['--' + shell for shell in sorted(shells)]
|
||||
if options.shell in shells:
|
||||
script = COMPLETION_SCRIPTS.get(options.shell, '')
|
||||
print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
|
||||
else:
|
||||
sys.stderr.write(
|
||||
'ERROR: You must pass %s\n' % ' or '.join(shell_options)
|
||||
)
|
||||
212
v1/flask/lib/python3.4/site-packages/pip/commands/download.py
Normal file
212
v1/flask/lib/python3.4/site-packages/pip/commands/download.py
Normal file
@@ -0,0 +1,212 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from pip.exceptions import CommandError
|
||||
from pip.index import FormatControl
|
||||
from pip.req import RequirementSet
|
||||
from pip.basecommand import RequirementCommand
|
||||
from pip import cmdoptions
|
||||
from pip.utils import ensure_dir, normalize_path
|
||||
from pip.utils.build import BuildDirectory
|
||||
from pip.utils.filesystem import check_path_owner
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DownloadCommand(RequirementCommand):
|
||||
"""
|
||||
Download packages from:
|
||||
|
||||
- PyPI (and other indexes) using requirement specifiers.
|
||||
- VCS project urls.
|
||||
- Local project directories.
|
||||
- Local or remote source archives.
|
||||
|
||||
pip also supports downloading from "requirements files", which provide
|
||||
an easy way to specify a whole environment to be downloaded.
|
||||
"""
|
||||
name = 'download'
|
||||
|
||||
usage = """
|
||||
%prog [options] <requirement specifier> [package-index-options] ...
|
||||
%prog [options] -r <requirements file> [package-index-options] ...
|
||||
%prog [options] [-e] <vcs project url> ...
|
||||
%prog [options] [-e] <local project path> ...
|
||||
%prog [options] <archive url/path> ..."""
|
||||
|
||||
summary = 'Download packages.'
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
super(DownloadCommand, self).__init__(*args, **kw)
|
||||
|
||||
cmd_opts = self.cmd_opts
|
||||
|
||||
cmd_opts.add_option(cmdoptions.constraints())
|
||||
cmd_opts.add_option(cmdoptions.editable())
|
||||
cmd_opts.add_option(cmdoptions.requirements())
|
||||
cmd_opts.add_option(cmdoptions.build_dir())
|
||||
cmd_opts.add_option(cmdoptions.no_deps())
|
||||
cmd_opts.add_option(cmdoptions.global_options())
|
||||
cmd_opts.add_option(cmdoptions.no_binary())
|
||||
cmd_opts.add_option(cmdoptions.only_binary())
|
||||
cmd_opts.add_option(cmdoptions.src())
|
||||
cmd_opts.add_option(cmdoptions.pre())
|
||||
cmd_opts.add_option(cmdoptions.no_clean())
|
||||
cmd_opts.add_option(cmdoptions.require_hashes())
|
||||
|
||||
cmd_opts.add_option(
|
||||
'-d', '--dest', '--destination-dir', '--destination-directory',
|
||||
dest='download_dir',
|
||||
metavar='dir',
|
||||
default=os.curdir,
|
||||
help=("Download packages into <dir>."),
|
||||
)
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--platform',
|
||||
dest='platform',
|
||||
metavar='platform',
|
||||
default=None,
|
||||
help=("Only download wheels compatible with <platform>. "
|
||||
"Defaults to the platform of the running system."),
|
||||
)
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--python-version',
|
||||
dest='python_version',
|
||||
metavar='python_version',
|
||||
default=None,
|
||||
help=("Only download wheels compatible with Python "
|
||||
"interpreter version <version>. If not specified, then the "
|
||||
"current system interpreter minor version is used. A major "
|
||||
"version (e.g. '2') can be specified to match all "
|
||||
"minor revs of that major version. A minor version "
|
||||
"(e.g. '34') can also be specified."),
|
||||
)
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--implementation',
|
||||
dest='implementation',
|
||||
metavar='implementation',
|
||||
default=None,
|
||||
help=("Only download wheels compatible with Python "
|
||||
"implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
|
||||
" or 'ip'. If not specified, then the current "
|
||||
"interpreter implementation is used. Use 'py' to force "
|
||||
"implementation-agnostic wheels."),
|
||||
)
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--abi',
|
||||
dest='abi',
|
||||
metavar='abi',
|
||||
default=None,
|
||||
help=("Only download wheels compatible with Python "
|
||||
"abi <abi>, e.g. 'pypy_41'. If not specified, then the "
|
||||
"current interpreter abi tag is used. Generally "
|
||||
"you will need to specify --implementation, "
|
||||
"--platform, and --python-version when using "
|
||||
"this option."),
|
||||
)
|
||||
|
||||
index_opts = cmdoptions.make_option_group(
|
||||
cmdoptions.non_deprecated_index_group,
|
||||
self.parser,
|
||||
)
|
||||
|
||||
self.parser.insert_option_group(0, index_opts)
|
||||
self.parser.insert_option_group(0, cmd_opts)
|
||||
|
||||
def run(self, options, args):
|
||||
options.ignore_installed = True
|
||||
|
||||
if options.python_version:
|
||||
python_versions = [options.python_version]
|
||||
else:
|
||||
python_versions = None
|
||||
|
||||
dist_restriction_set = any([
|
||||
options.python_version,
|
||||
options.platform,
|
||||
options.abi,
|
||||
options.implementation,
|
||||
])
|
||||
binary_only = FormatControl(set(), set([':all:']))
|
||||
if dist_restriction_set and options.format_control != binary_only:
|
||||
raise CommandError(
|
||||
"--only-binary=:all: must be set and --no-binary must not "
|
||||
"be set (or must be set to :none:) when restricting platform "
|
||||
"and interpreter constraints using --python-version, "
|
||||
"--platform, --abi, or --implementation."
|
||||
)
|
||||
|
||||
options.src_dir = os.path.abspath(options.src_dir)
|
||||
options.download_dir = normalize_path(options.download_dir)
|
||||
|
||||
ensure_dir(options.download_dir)
|
||||
|
||||
with self._build_session(options) as session:
|
||||
finder = self._build_package_finder(
|
||||
options=options,
|
||||
session=session,
|
||||
platform=options.platform,
|
||||
python_versions=python_versions,
|
||||
abi=options.abi,
|
||||
implementation=options.implementation,
|
||||
)
|
||||
build_delete = (not (options.no_clean or options.build_dir))
|
||||
if options.cache_dir and not check_path_owner(options.cache_dir):
|
||||
logger.warning(
|
||||
"The directory '%s' or its parent directory is not owned "
|
||||
"by the current user and caching wheels has been "
|
||||
"disabled. check the permissions and owner of that "
|
||||
"directory. If executing pip with sudo, you may want "
|
||||
"sudo's -H flag.",
|
||||
options.cache_dir,
|
||||
)
|
||||
options.cache_dir = None
|
||||
|
||||
with BuildDirectory(options.build_dir,
|
||||
delete=build_delete) as build_dir:
|
||||
|
||||
requirement_set = RequirementSet(
|
||||
build_dir=build_dir,
|
||||
src_dir=options.src_dir,
|
||||
download_dir=options.download_dir,
|
||||
ignore_installed=True,
|
||||
ignore_dependencies=options.ignore_dependencies,
|
||||
session=session,
|
||||
isolated=options.isolated_mode,
|
||||
require_hashes=options.require_hashes
|
||||
)
|
||||
self.populate_requirement_set(
|
||||
requirement_set,
|
||||
args,
|
||||
options,
|
||||
finder,
|
||||
session,
|
||||
self.name,
|
||||
None
|
||||
)
|
||||
|
||||
if not requirement_set.has_requirements:
|
||||
return
|
||||
|
||||
requirement_set.prepare_files(finder)
|
||||
|
||||
downloaded = ' '.join([
|
||||
req.name for req in requirement_set.successfully_downloaded
|
||||
])
|
||||
if downloaded:
|
||||
logger.info(
|
||||
'Successfully downloaded %s', downloaded
|
||||
)
|
||||
|
||||
# Clean up
|
||||
if not options.no_clean:
|
||||
requirement_set.cleanup_files()
|
||||
|
||||
return requirement_set
|
||||
87
v1/flask/lib/python3.4/site-packages/pip/commands/freeze.py
Normal file
87
v1/flask/lib/python3.4/site-packages/pip/commands/freeze.py
Normal file
@@ -0,0 +1,87 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import sys
|
||||
|
||||
import pip
|
||||
from pip.compat import stdlib_pkgs
|
||||
from pip.basecommand import Command
|
||||
from pip.operations.freeze import freeze
|
||||
from pip.wheel import WheelCache
|
||||
|
||||
|
||||
DEV_PKGS = ('pip', 'setuptools', 'distribute', 'wheel')
|
||||
|
||||
|
||||
class FreezeCommand(Command):
|
||||
"""
|
||||
Output installed packages in requirements format.
|
||||
|
||||
packages are listed in a case-insensitive sorted order.
|
||||
"""
|
||||
name = 'freeze'
|
||||
usage = """
|
||||
%prog [options]"""
|
||||
summary = 'Output installed packages in requirements format.'
|
||||
log_streams = ("ext://sys.stderr", "ext://sys.stderr")
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
super(FreezeCommand, self).__init__(*args, **kw)
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
'-r', '--requirement',
|
||||
dest='requirements',
|
||||
action='append',
|
||||
default=[],
|
||||
metavar='file',
|
||||
help="Use the order in the given requirements file and its "
|
||||
"comments when generating output. This option can be "
|
||||
"used multiple times.")
|
||||
self.cmd_opts.add_option(
|
||||
'-f', '--find-links',
|
||||
dest='find_links',
|
||||
action='append',
|
||||
default=[],
|
||||
metavar='URL',
|
||||
help='URL for finding packages, which will be added to the '
|
||||
'output.')
|
||||
self.cmd_opts.add_option(
|
||||
'-l', '--local',
|
||||
dest='local',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='If in a virtualenv that has global access, do not output '
|
||||
'globally-installed packages.')
|
||||
self.cmd_opts.add_option(
|
||||
'--user',
|
||||
dest='user',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Only output packages installed in user-site.')
|
||||
self.cmd_opts.add_option(
|
||||
'--all',
|
||||
dest='freeze_all',
|
||||
action='store_true',
|
||||
help='Do not skip these packages in the output:'
|
||||
' %s' % ', '.join(DEV_PKGS))
|
||||
|
||||
self.parser.insert_option_group(0, self.cmd_opts)
|
||||
|
||||
def run(self, options, args):
|
||||
format_control = pip.index.FormatControl(set(), set())
|
||||
wheel_cache = WheelCache(options.cache_dir, format_control)
|
||||
skip = set(stdlib_pkgs)
|
||||
if not options.freeze_all:
|
||||
skip.update(DEV_PKGS)
|
||||
|
||||
freeze_kwargs = dict(
|
||||
requirement=options.requirements,
|
||||
find_links=options.find_links,
|
||||
local_only=options.local,
|
||||
user_only=options.user,
|
||||
skip_regex=options.skip_requirements_regex,
|
||||
isolated=options.isolated_mode,
|
||||
wheel_cache=wheel_cache,
|
||||
skip=skip)
|
||||
|
||||
for line in freeze(**freeze_kwargs):
|
||||
sys.stdout.write(line + '\n')
|
||||
57
v1/flask/lib/python3.4/site-packages/pip/commands/hash.py
Normal file
57
v1/flask/lib/python3.4/site-packages/pip/commands/hash.py
Normal file
@@ -0,0 +1,57 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import hashlib
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from pip.basecommand import Command
|
||||
from pip.status_codes import ERROR
|
||||
from pip.utils import read_chunks
|
||||
from pip.utils.hashes import FAVORITE_HASH, STRONG_HASHES
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HashCommand(Command):
|
||||
"""
|
||||
Compute a hash of a local package archive.
|
||||
|
||||
These can be used with --hash in a requirements file to do repeatable
|
||||
installs.
|
||||
|
||||
"""
|
||||
name = 'hash'
|
||||
usage = '%prog [options] <file> ...'
|
||||
summary = 'Compute hashes of package archives.'
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
super(HashCommand, self).__init__(*args, **kw)
|
||||
self.cmd_opts.add_option(
|
||||
'-a', '--algorithm',
|
||||
dest='algorithm',
|
||||
choices=STRONG_HASHES,
|
||||
action='store',
|
||||
default=FAVORITE_HASH,
|
||||
help='The hash algorithm to use: one of %s' %
|
||||
', '.join(STRONG_HASHES))
|
||||
self.parser.insert_option_group(0, self.cmd_opts)
|
||||
|
||||
def run(self, options, args):
|
||||
if not args:
|
||||
self.parser.print_usage(sys.stderr)
|
||||
return ERROR
|
||||
|
||||
algorithm = options.algorithm
|
||||
for path in args:
|
||||
logger.info('%s:\n--hash=%s:%s',
|
||||
path, algorithm, _hash_of_file(path, algorithm))
|
||||
|
||||
|
||||
def _hash_of_file(path, algorithm):
|
||||
"""Return the hash digest of a file."""
|
||||
with open(path, 'rb') as archive:
|
||||
hash = hashlib.new(algorithm)
|
||||
for chunk in read_chunks(archive):
|
||||
hash.update(chunk)
|
||||
return hash.hexdigest()
|
||||
35
v1/flask/lib/python3.4/site-packages/pip/commands/help.py
Normal file
35
v1/flask/lib/python3.4/site-packages/pip/commands/help.py
Normal file
@@ -0,0 +1,35 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from pip.basecommand import Command, SUCCESS
|
||||
from pip.exceptions import CommandError
|
||||
|
||||
|
||||
class HelpCommand(Command):
|
||||
"""Show help for commands"""
|
||||
name = 'help'
|
||||
usage = """
|
||||
%prog <command>"""
|
||||
summary = 'Show help for commands.'
|
||||
|
||||
def run(self, options, args):
|
||||
from pip.commands import commands_dict, get_similar_commands
|
||||
|
||||
try:
|
||||
# 'pip help' with no args is handled by pip.__init__.parseopt()
|
||||
cmd_name = args[0] # the command we need help for
|
||||
except IndexError:
|
||||
return SUCCESS
|
||||
|
||||
if cmd_name not in commands_dict:
|
||||
guess = get_similar_commands(cmd_name)
|
||||
|
||||
msg = ['unknown command "%s"' % cmd_name]
|
||||
if guess:
|
||||
msg.append('maybe you meant "%s"' % guess)
|
||||
|
||||
raise CommandError(' - '.join(msg))
|
||||
|
||||
command = commands_dict[cmd_name]()
|
||||
command.parser.print_help()
|
||||
|
||||
return SUCCESS
|
||||
437
v1/flask/lib/python3.4/site-packages/pip/commands/install.py
Normal file
437
v1/flask/lib/python3.4/site-packages/pip/commands/install.py
Normal file
@@ -0,0 +1,437 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
import operator
|
||||
import os
|
||||
import tempfile
|
||||
import shutil
|
||||
import warnings
|
||||
try:
|
||||
import wheel
|
||||
except ImportError:
|
||||
wheel = None
|
||||
|
||||
from pip.req import RequirementSet
|
||||
from pip.basecommand import RequirementCommand
|
||||
from pip.locations import virtualenv_no_global, distutils_scheme
|
||||
from pip.exceptions import (
|
||||
InstallationError, CommandError, PreviousBuildDirError,
|
||||
)
|
||||
from pip import cmdoptions
|
||||
from pip.utils import ensure_dir, get_installed_version
|
||||
from pip.utils.build import BuildDirectory
|
||||
from pip.utils.deprecation import RemovedInPip10Warning
|
||||
from pip.utils.filesystem import check_path_owner
|
||||
from pip.wheel import WheelCache, WheelBuilder
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class InstallCommand(RequirementCommand):
|
||||
"""
|
||||
Install packages from:
|
||||
|
||||
- PyPI (and other indexes) using requirement specifiers.
|
||||
- VCS project urls.
|
||||
- Local project directories.
|
||||
- Local or remote source archives.
|
||||
|
||||
pip also supports installing from "requirements files", which provide
|
||||
an easy way to specify a whole environment to be installed.
|
||||
"""
|
||||
name = 'install'
|
||||
|
||||
usage = """
|
||||
%prog [options] <requirement specifier> [package-index-options] ...
|
||||
%prog [options] -r <requirements file> [package-index-options] ...
|
||||
%prog [options] [-e] <vcs project url> ...
|
||||
%prog [options] [-e] <local project path> ...
|
||||
%prog [options] <archive url/path> ..."""
|
||||
|
||||
summary = 'Install packages.'
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
super(InstallCommand, self).__init__(*args, **kw)
|
||||
|
||||
cmd_opts = self.cmd_opts
|
||||
|
||||
cmd_opts.add_option(cmdoptions.constraints())
|
||||
cmd_opts.add_option(cmdoptions.editable())
|
||||
cmd_opts.add_option(cmdoptions.requirements())
|
||||
cmd_opts.add_option(cmdoptions.build_dir())
|
||||
|
||||
cmd_opts.add_option(
|
||||
'-t', '--target',
|
||||
dest='target_dir',
|
||||
metavar='dir',
|
||||
default=None,
|
||||
help='Install packages into <dir>. '
|
||||
'By default this will not replace existing files/folders in '
|
||||
'<dir>. Use --upgrade to replace existing packages in <dir> '
|
||||
'with new versions.'
|
||||
)
|
||||
|
||||
cmd_opts.add_option(
|
||||
'-d', '--download', '--download-dir', '--download-directory',
|
||||
dest='download_dir',
|
||||
metavar='dir',
|
||||
default=None,
|
||||
help=("Download packages into <dir> instead of installing them, "
|
||||
"regardless of what's already installed."),
|
||||
)
|
||||
|
||||
cmd_opts.add_option(cmdoptions.src())
|
||||
|
||||
cmd_opts.add_option(
|
||||
'-U', '--upgrade',
|
||||
dest='upgrade',
|
||||
action='store_true',
|
||||
help='Upgrade all specified packages to the newest available '
|
||||
'version. The handling of dependencies depends on the '
|
||||
'upgrade-strategy used.'
|
||||
)
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--upgrade-strategy',
|
||||
dest='upgrade_strategy',
|
||||
default='eager',
|
||||
choices=['only-if-needed', 'eager'],
|
||||
help='Determines how dependency upgrading should be handled. '
|
||||
'"eager" - dependencies are upgraded regardless of '
|
||||
'whether the currently installed version satisfies the '
|
||||
'requirements of the upgraded package(s). '
|
||||
'"only-if-needed" - are upgraded only when they do not '
|
||||
'satisfy the requirements of the upgraded package(s).'
|
||||
)
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--force-reinstall',
|
||||
dest='force_reinstall',
|
||||
action='store_true',
|
||||
help='When upgrading, reinstall all packages even if they are '
|
||||
'already up-to-date.')
|
||||
|
||||
cmd_opts.add_option(
|
||||
'-I', '--ignore-installed',
|
||||
dest='ignore_installed',
|
||||
action='store_true',
|
||||
help='Ignore the installed packages (reinstalling instead).')
|
||||
|
||||
cmd_opts.add_option(cmdoptions.ignore_requires_python())
|
||||
cmd_opts.add_option(cmdoptions.no_deps())
|
||||
|
||||
cmd_opts.add_option(cmdoptions.install_options())
|
||||
cmd_opts.add_option(cmdoptions.global_options())
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--user',
|
||||
dest='use_user_site',
|
||||
action='store_true',
|
||||
help="Install to the Python user install directory for your "
|
||||
"platform. Typically ~/.local/, or %APPDATA%\Python on "
|
||||
"Windows. (See the Python documentation for site.USER_BASE "
|
||||
"for full details.)")
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--egg',
|
||||
dest='as_egg',
|
||||
action='store_true',
|
||||
help="Install packages as eggs, not 'flat', like pip normally "
|
||||
"does. This option is not about installing *from* eggs. "
|
||||
"(WARNING: Because this option overrides pip's normal install"
|
||||
" logic, requirements files may not behave as expected.)")
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--root',
|
||||
dest='root_path',
|
||||
metavar='dir',
|
||||
default=None,
|
||||
help="Install everything relative to this alternate root "
|
||||
"directory.")
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--prefix',
|
||||
dest='prefix_path',
|
||||
metavar='dir',
|
||||
default=None,
|
||||
help="Installation prefix where lib, bin and other top-level "
|
||||
"folders are placed")
|
||||
|
||||
cmd_opts.add_option(
|
||||
"--compile",
|
||||
action="store_true",
|
||||
dest="compile",
|
||||
default=True,
|
||||
help="Compile py files to pyc",
|
||||
)
|
||||
|
||||
cmd_opts.add_option(
|
||||
"--no-compile",
|
||||
action="store_false",
|
||||
dest="compile",
|
||||
help="Do not compile py files to pyc",
|
||||
)
|
||||
|
||||
cmd_opts.add_option(cmdoptions.use_wheel())
|
||||
cmd_opts.add_option(cmdoptions.no_use_wheel())
|
||||
cmd_opts.add_option(cmdoptions.no_binary())
|
||||
cmd_opts.add_option(cmdoptions.only_binary())
|
||||
cmd_opts.add_option(cmdoptions.pre())
|
||||
cmd_opts.add_option(cmdoptions.no_clean())
|
||||
cmd_opts.add_option(cmdoptions.require_hashes())
|
||||
|
||||
index_opts = cmdoptions.make_option_group(
|
||||
cmdoptions.index_group,
|
||||
self.parser,
|
||||
)
|
||||
|
||||
self.parser.insert_option_group(0, index_opts)
|
||||
self.parser.insert_option_group(0, cmd_opts)
|
||||
|
||||
def run(self, options, args):
|
||||
cmdoptions.resolve_wheel_no_use_binary(options)
|
||||
cmdoptions.check_install_build_global(options)
|
||||
|
||||
if options.as_egg:
|
||||
warnings.warn(
|
||||
"--egg has been deprecated and will be removed in the future. "
|
||||
"This flag is mutually exclusive with large parts of pip, and "
|
||||
"actually using it invalidates pip's ability to manage the "
|
||||
"installation process.",
|
||||
RemovedInPip10Warning,
|
||||
)
|
||||
|
||||
if options.allow_external:
|
||||
warnings.warn(
|
||||
"--allow-external has been deprecated and will be removed in "
|
||||
"the future. Due to changes in the repository protocol, it no "
|
||||
"longer has any effect.",
|
||||
RemovedInPip10Warning,
|
||||
)
|
||||
|
||||
if options.allow_all_external:
|
||||
warnings.warn(
|
||||
"--allow-all-external has been deprecated and will be removed "
|
||||
"in the future. Due to changes in the repository protocol, it "
|
||||
"no longer has any effect.",
|
||||
RemovedInPip10Warning,
|
||||
)
|
||||
|
||||
if options.allow_unverified:
|
||||
warnings.warn(
|
||||
"--allow-unverified has been deprecated and will be removed "
|
||||
"in the future. Due to changes in the repository protocol, it "
|
||||
"no longer has any effect.",
|
||||
RemovedInPip10Warning,
|
||||
)
|
||||
|
||||
if options.download_dir:
|
||||
warnings.warn(
|
||||
"pip install --download has been deprecated and will be "
|
||||
"removed in the future. Pip now has a download command that "
|
||||
"should be used instead.",
|
||||
RemovedInPip10Warning,
|
||||
)
|
||||
options.ignore_installed = True
|
||||
|
||||
if options.build_dir:
|
||||
options.build_dir = os.path.abspath(options.build_dir)
|
||||
|
||||
options.src_dir = os.path.abspath(options.src_dir)
|
||||
install_options = options.install_options or []
|
||||
if options.use_user_site:
|
||||
if options.prefix_path:
|
||||
raise CommandError(
|
||||
"Can not combine '--user' and '--prefix' as they imply "
|
||||
"different installation locations"
|
||||
)
|
||||
if virtualenv_no_global():
|
||||
raise InstallationError(
|
||||
"Can not perform a '--user' install. User site-packages "
|
||||
"are not visible in this virtualenv."
|
||||
)
|
||||
install_options.append('--user')
|
||||
install_options.append('--prefix=')
|
||||
|
||||
temp_target_dir = None
|
||||
if options.target_dir:
|
||||
options.ignore_installed = True
|
||||
temp_target_dir = tempfile.mkdtemp()
|
||||
options.target_dir = os.path.abspath(options.target_dir)
|
||||
if (os.path.exists(options.target_dir) and not
|
||||
os.path.isdir(options.target_dir)):
|
||||
raise CommandError(
|
||||
"Target path exists but is not a directory, will not "
|
||||
"continue."
|
||||
)
|
||||
install_options.append('--home=' + temp_target_dir)
|
||||
|
||||
global_options = options.global_options or []
|
||||
|
||||
with self._build_session(options) as session:
|
||||
|
||||
finder = self._build_package_finder(options, session)
|
||||
build_delete = (not (options.no_clean or options.build_dir))
|
||||
wheel_cache = WheelCache(options.cache_dir, options.format_control)
|
||||
if options.cache_dir and not check_path_owner(options.cache_dir):
|
||||
logger.warning(
|
||||
"The directory '%s' or its parent directory is not owned "
|
||||
"by the current user and caching wheels has been "
|
||||
"disabled. check the permissions and owner of that "
|
||||
"directory. If executing pip with sudo, you may want "
|
||||
"sudo's -H flag.",
|
||||
options.cache_dir,
|
||||
)
|
||||
options.cache_dir = None
|
||||
|
||||
with BuildDirectory(options.build_dir,
|
||||
delete=build_delete) as build_dir:
|
||||
requirement_set = RequirementSet(
|
||||
build_dir=build_dir,
|
||||
src_dir=options.src_dir,
|
||||
download_dir=options.download_dir,
|
||||
upgrade=options.upgrade,
|
||||
upgrade_strategy=options.upgrade_strategy,
|
||||
as_egg=options.as_egg,
|
||||
ignore_installed=options.ignore_installed,
|
||||
ignore_dependencies=options.ignore_dependencies,
|
||||
ignore_requires_python=options.ignore_requires_python,
|
||||
force_reinstall=options.force_reinstall,
|
||||
use_user_site=options.use_user_site,
|
||||
target_dir=temp_target_dir,
|
||||
session=session,
|
||||
pycompile=options.compile,
|
||||
isolated=options.isolated_mode,
|
||||
wheel_cache=wheel_cache,
|
||||
require_hashes=options.require_hashes,
|
||||
)
|
||||
|
||||
self.populate_requirement_set(
|
||||
requirement_set, args, options, finder, session, self.name,
|
||||
wheel_cache
|
||||
)
|
||||
|
||||
if not requirement_set.has_requirements:
|
||||
return
|
||||
|
||||
try:
|
||||
if (options.download_dir or not wheel or not
|
||||
options.cache_dir):
|
||||
# on -d don't do complex things like building
|
||||
# wheels, and don't try to build wheels when wheel is
|
||||
# not installed.
|
||||
requirement_set.prepare_files(finder)
|
||||
else:
|
||||
# build wheels before install.
|
||||
wb = WheelBuilder(
|
||||
requirement_set,
|
||||
finder,
|
||||
build_options=[],
|
||||
global_options=[],
|
||||
)
|
||||
# Ignore the result: a failed wheel will be
|
||||
# installed from the sdist/vcs whatever.
|
||||
wb.build(autobuilding=True)
|
||||
|
||||
if not options.download_dir:
|
||||
requirement_set.install(
|
||||
install_options,
|
||||
global_options,
|
||||
root=options.root_path,
|
||||
prefix=options.prefix_path,
|
||||
)
|
||||
|
||||
possible_lib_locations = get_lib_location_guesses(
|
||||
user=options.use_user_site,
|
||||
home=temp_target_dir,
|
||||
root=options.root_path,
|
||||
prefix=options.prefix_path,
|
||||
isolated=options.isolated_mode,
|
||||
)
|
||||
reqs = sorted(
|
||||
requirement_set.successfully_installed,
|
||||
key=operator.attrgetter('name'))
|
||||
items = []
|
||||
for req in reqs:
|
||||
item = req.name
|
||||
try:
|
||||
installed_version = get_installed_version(
|
||||
req.name, possible_lib_locations
|
||||
)
|
||||
if installed_version:
|
||||
item += '-' + installed_version
|
||||
except Exception:
|
||||
pass
|
||||
items.append(item)
|
||||
installed = ' '.join(items)
|
||||
if installed:
|
||||
logger.info('Successfully installed %s', installed)
|
||||
else:
|
||||
downloaded = ' '.join([
|
||||
req.name
|
||||
for req in requirement_set.successfully_downloaded
|
||||
])
|
||||
if downloaded:
|
||||
logger.info(
|
||||
'Successfully downloaded %s', downloaded
|
||||
)
|
||||
except PreviousBuildDirError:
|
||||
options.no_clean = True
|
||||
raise
|
||||
finally:
|
||||
# Clean up
|
||||
if not options.no_clean:
|
||||
requirement_set.cleanup_files()
|
||||
|
||||
if options.target_dir:
|
||||
ensure_dir(options.target_dir)
|
||||
|
||||
# Checking both purelib and platlib directories for installed
|
||||
# packages to be moved to target directory
|
||||
lib_dir_list = []
|
||||
|
||||
purelib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
|
||||
platlib_dir = distutils_scheme('', home=temp_target_dir)['platlib']
|
||||
|
||||
if os.path.exists(purelib_dir):
|
||||
lib_dir_list.append(purelib_dir)
|
||||
if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
|
||||
lib_dir_list.append(platlib_dir)
|
||||
|
||||
for lib_dir in lib_dir_list:
|
||||
for item in os.listdir(lib_dir):
|
||||
target_item_dir = os.path.join(options.target_dir, item)
|
||||
if os.path.exists(target_item_dir):
|
||||
if not options.upgrade:
|
||||
logger.warning(
|
||||
'Target directory %s already exists. Specify '
|
||||
'--upgrade to force replacement.',
|
||||
target_item_dir
|
||||
)
|
||||
continue
|
||||
if os.path.islink(target_item_dir):
|
||||
logger.warning(
|
||||
'Target directory %s already exists and is '
|
||||
'a link. Pip will not automatically replace '
|
||||
'links, please remove if replacement is '
|
||||
'desired.',
|
||||
target_item_dir
|
||||
)
|
||||
continue
|
||||
if os.path.isdir(target_item_dir):
|
||||
shutil.rmtree(target_item_dir)
|
||||
else:
|
||||
os.remove(target_item_dir)
|
||||
|
||||
shutil.move(
|
||||
os.path.join(lib_dir, item),
|
||||
target_item_dir
|
||||
)
|
||||
shutil.rmtree(temp_target_dir)
|
||||
return requirement_set
|
||||
|
||||
|
||||
def get_lib_location_guesses(*args, **kwargs):
|
||||
scheme = distutils_scheme('', *args, **kwargs)
|
||||
return [scheme['purelib'], scheme['platlib']]
|
||||
337
v1/flask/lib/python3.4/site-packages/pip/commands/list.py
Normal file
337
v1/flask/lib/python3.4/site-packages/pip/commands/list.py
Normal file
@@ -0,0 +1,337 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import json
|
||||
import logging
|
||||
import warnings
|
||||
try:
|
||||
from itertools import zip_longest
|
||||
except ImportError:
|
||||
from itertools import izip_longest as zip_longest
|
||||
|
||||
from pip._vendor import six
|
||||
|
||||
from pip.basecommand import Command
|
||||
from pip.exceptions import CommandError
|
||||
from pip.index import PackageFinder
|
||||
from pip.utils import (
|
||||
get_installed_distributions, dist_is_editable)
|
||||
from pip.utils.deprecation import RemovedInPip10Warning
|
||||
from pip.cmdoptions import make_option_group, index_group
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ListCommand(Command):
|
||||
"""
|
||||
List installed packages, including editables.
|
||||
|
||||
Packages are listed in a case-insensitive sorted order.
|
||||
"""
|
||||
name = 'list'
|
||||
usage = """
|
||||
%prog [options]"""
|
||||
summary = 'List installed packages.'
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
super(ListCommand, self).__init__(*args, **kw)
|
||||
|
||||
cmd_opts = self.cmd_opts
|
||||
|
||||
cmd_opts.add_option(
|
||||
'-o', '--outdated',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='List outdated packages')
|
||||
cmd_opts.add_option(
|
||||
'-u', '--uptodate',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='List uptodate packages')
|
||||
cmd_opts.add_option(
|
||||
'-e', '--editable',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='List editable projects.')
|
||||
cmd_opts.add_option(
|
||||
'-l', '--local',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help=('If in a virtualenv that has global access, do not list '
|
||||
'globally-installed packages.'),
|
||||
)
|
||||
self.cmd_opts.add_option(
|
||||
'--user',
|
||||
dest='user',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Only output packages installed in user-site.')
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--pre',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help=("Include pre-release and development versions. By default, "
|
||||
"pip only finds stable versions."),
|
||||
)
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--format',
|
||||
action='store',
|
||||
dest='list_format',
|
||||
choices=('legacy', 'columns', 'freeze', 'json'),
|
||||
help="Select the output format among: legacy (default), columns, "
|
||||
"freeze or json.",
|
||||
)
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--not-required',
|
||||
action='store_true',
|
||||
dest='not_required',
|
||||
help="List packages that are not dependencies of "
|
||||
"installed packages.",
|
||||
)
|
||||
|
||||
index_opts = make_option_group(index_group, self.parser)
|
||||
|
||||
self.parser.insert_option_group(0, index_opts)
|
||||
self.parser.insert_option_group(0, cmd_opts)
|
||||
|
||||
def _build_package_finder(self, options, index_urls, session):
|
||||
"""
|
||||
Create a package finder appropriate to this list command.
|
||||
"""
|
||||
return PackageFinder(
|
||||
find_links=options.find_links,
|
||||
index_urls=index_urls,
|
||||
allow_all_prereleases=options.pre,
|
||||
trusted_hosts=options.trusted_hosts,
|
||||
process_dependency_links=options.process_dependency_links,
|
||||
session=session,
|
||||
)
|
||||
|
||||
def run(self, options, args):
|
||||
if options.allow_external:
|
||||
warnings.warn(
|
||||
"--allow-external has been deprecated and will be removed in "
|
||||
"the future. Due to changes in the repository protocol, it no "
|
||||
"longer has any effect.",
|
||||
RemovedInPip10Warning,
|
||||
)
|
||||
|
||||
if options.allow_all_external:
|
||||
warnings.warn(
|
||||
"--allow-all-external has been deprecated and will be removed "
|
||||
"in the future. Due to changes in the repository protocol, it "
|
||||
"no longer has any effect.",
|
||||
RemovedInPip10Warning,
|
||||
)
|
||||
|
||||
if options.allow_unverified:
|
||||
warnings.warn(
|
||||
"--allow-unverified has been deprecated and will be removed "
|
||||
"in the future. Due to changes in the repository protocol, it "
|
||||
"no longer has any effect.",
|
||||
RemovedInPip10Warning,
|
||||
)
|
||||
|
||||
if options.list_format is None:
|
||||
warnings.warn(
|
||||
"The default format will switch to columns in the future. "
|
||||
"You can use --format=(legacy|columns) (or define a "
|
||||
"format=(legacy|columns) in your pip.conf under the [list] "
|
||||
"section) to disable this warning.",
|
||||
RemovedInPip10Warning,
|
||||
)
|
||||
|
||||
if options.outdated and options.uptodate:
|
||||
raise CommandError(
|
||||
"Options --outdated and --uptodate cannot be combined.")
|
||||
|
||||
packages = get_installed_distributions(
|
||||
local_only=options.local,
|
||||
user_only=options.user,
|
||||
editables_only=options.editable,
|
||||
)
|
||||
|
||||
if options.outdated:
|
||||
packages = self.get_outdated(packages, options)
|
||||
elif options.uptodate:
|
||||
packages = self.get_uptodate(packages, options)
|
||||
|
||||
if options.not_required:
|
||||
packages = self.get_not_required(packages, options)
|
||||
|
||||
self.output_package_listing(packages, options)
|
||||
|
||||
def get_outdated(self, packages, options):
|
||||
return [
|
||||
dist for dist in self.iter_packages_latest_infos(packages, options)
|
||||
if dist.latest_version > dist.parsed_version
|
||||
]
|
||||
|
||||
def get_uptodate(self, packages, options):
|
||||
return [
|
||||
dist for dist in self.iter_packages_latest_infos(packages, options)
|
||||
if dist.latest_version == dist.parsed_version
|
||||
]
|
||||
|
||||
def get_not_required(self, packages, options):
|
||||
dep_keys = set()
|
||||
for dist in packages:
|
||||
dep_keys.update(requirement.key for requirement in dist.requires())
|
||||
return set(pkg for pkg in packages if pkg.key not in dep_keys)
|
||||
|
||||
def iter_packages_latest_infos(self, packages, options):
|
||||
index_urls = [options.index_url] + options.extra_index_urls
|
||||
if options.no_index:
|
||||
logger.debug('Ignoring indexes: %s', ','.join(index_urls))
|
||||
index_urls = []
|
||||
|
||||
dependency_links = []
|
||||
for dist in packages:
|
||||
if dist.has_metadata('dependency_links.txt'):
|
||||
dependency_links.extend(
|
||||
dist.get_metadata_lines('dependency_links.txt'),
|
||||
)
|
||||
|
||||
with self._build_session(options) as session:
|
||||
finder = self._build_package_finder(options, index_urls, session)
|
||||
finder.add_dependency_links(dependency_links)
|
||||
|
||||
for dist in packages:
|
||||
typ = 'unknown'
|
||||
all_candidates = finder.find_all_candidates(dist.key)
|
||||
if not options.pre:
|
||||
# Remove prereleases
|
||||
all_candidates = [candidate for candidate in all_candidates
|
||||
if not candidate.version.is_prerelease]
|
||||
|
||||
if not all_candidates:
|
||||
continue
|
||||
best_candidate = max(all_candidates,
|
||||
key=finder._candidate_sort_key)
|
||||
remote_version = best_candidate.version
|
||||
if best_candidate.location.is_wheel:
|
||||
typ = 'wheel'
|
||||
else:
|
||||
typ = 'sdist'
|
||||
# This is dirty but makes the rest of the code much cleaner
|
||||
dist.latest_version = remote_version
|
||||
dist.latest_filetype = typ
|
||||
yield dist
|
||||
|
||||
def output_legacy(self, dist):
|
||||
if dist_is_editable(dist):
|
||||
return '%s (%s, %s)' % (
|
||||
dist.project_name,
|
||||
dist.version,
|
||||
dist.location,
|
||||
)
|
||||
else:
|
||||
return '%s (%s)' % (dist.project_name, dist.version)
|
||||
|
||||
def output_legacy_latest(self, dist):
|
||||
return '%s - Latest: %s [%s]' % (
|
||||
self.output_legacy(dist),
|
||||
dist.latest_version,
|
||||
dist.latest_filetype,
|
||||
)
|
||||
|
||||
def output_package_listing(self, packages, options):
|
||||
packages = sorted(
|
||||
packages,
|
||||
key=lambda dist: dist.project_name.lower(),
|
||||
)
|
||||
if options.list_format == 'columns' and packages:
|
||||
data, header = format_for_columns(packages, options)
|
||||
self.output_package_listing_columns(data, header)
|
||||
elif options.list_format == 'freeze':
|
||||
for dist in packages:
|
||||
logger.info("%s==%s", dist.project_name, dist.version)
|
||||
elif options.list_format == 'json':
|
||||
logger.info(format_for_json(packages, options))
|
||||
else: # legacy
|
||||
for dist in packages:
|
||||
if options.outdated:
|
||||
logger.info(self.output_legacy_latest(dist))
|
||||
else:
|
||||
logger.info(self.output_legacy(dist))
|
||||
|
||||
def output_package_listing_columns(self, data, header):
|
||||
# insert the header first: we need to know the size of column names
|
||||
if len(data) > 0:
|
||||
data.insert(0, header)
|
||||
|
||||
pkg_strings, sizes = tabulate(data)
|
||||
|
||||
# Create and add a separator.
|
||||
if len(data) > 0:
|
||||
pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))
|
||||
|
||||
for val in pkg_strings:
|
||||
logger.info(val)
|
||||
|
||||
|
||||
def tabulate(vals):
|
||||
# From pfmoore on GitHub:
|
||||
# https://github.com/pypa/pip/issues/3651#issuecomment-216932564
|
||||
assert len(vals) > 0
|
||||
|
||||
sizes = [0] * max(len(x) for x in vals)
|
||||
for row in vals:
|
||||
sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)]
|
||||
|
||||
result = []
|
||||
for row in vals:
|
||||
display = " ".join([str(c).ljust(s) if c is not None else ''
|
||||
for s, c in zip_longest(sizes, row)])
|
||||
result.append(display)
|
||||
|
||||
return result, sizes
|
||||
|
||||
|
||||
def format_for_columns(pkgs, options):
|
||||
"""
|
||||
Convert the package data into something usable
|
||||
by output_package_listing_columns.
|
||||
"""
|
||||
running_outdated = options.outdated
|
||||
# Adjust the header for the `pip list --outdated` case.
|
||||
if running_outdated:
|
||||
header = ["Package", "Version", "Latest", "Type"]
|
||||
else:
|
||||
header = ["Package", "Version"]
|
||||
|
||||
data = []
|
||||
if any(dist_is_editable(x) for x in pkgs):
|
||||
header.append("Location")
|
||||
|
||||
for proj in pkgs:
|
||||
# if we're working on the 'outdated' list, separate out the
|
||||
# latest_version and type
|
||||
row = [proj.project_name, proj.version]
|
||||
|
||||
if running_outdated:
|
||||
row.append(proj.latest_version)
|
||||
row.append(proj.latest_filetype)
|
||||
|
||||
if dist_is_editable(proj):
|
||||
row.append(proj.location)
|
||||
|
||||
data.append(row)
|
||||
|
||||
return data, header
|
||||
|
||||
|
||||
def format_for_json(packages, options):
|
||||
data = []
|
||||
for dist in packages:
|
||||
info = {
|
||||
'name': dist.project_name,
|
||||
'version': six.text_type(dist.version),
|
||||
}
|
||||
if options.outdated:
|
||||
info['latest_version'] = six.text_type(dist.latest_version)
|
||||
info['latest_filetype'] = dist.latest_filetype
|
||||
data.append(info)
|
||||
return json.dumps(data)
|
||||
133
v1/flask/lib/python3.4/site-packages/pip/commands/search.py
Normal file
133
v1/flask/lib/python3.4/site-packages/pip/commands/search.py
Normal file
@@ -0,0 +1,133 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
from pip.basecommand import Command, SUCCESS
|
||||
from pip.compat import OrderedDict
|
||||
from pip.download import PipXmlrpcTransport
|
||||
from pip.models import PyPI
|
||||
from pip.utils import get_terminal_size
|
||||
from pip.utils.logging import indent_log
|
||||
from pip.exceptions import CommandError
|
||||
from pip.status_codes import NO_MATCHES_FOUND
|
||||
from pip._vendor.packaging.version import parse as parse_version
|
||||
from pip._vendor import pkg_resources
|
||||
from pip._vendor.six.moves import xmlrpc_client
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SearchCommand(Command):
|
||||
"""Search for PyPI packages whose name or summary contains <query>."""
|
||||
name = 'search'
|
||||
usage = """
|
||||
%prog [options] <query>"""
|
||||
summary = 'Search PyPI for packages.'
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
super(SearchCommand, self).__init__(*args, **kw)
|
||||
self.cmd_opts.add_option(
|
||||
'-i', '--index',
|
||||
dest='index',
|
||||
metavar='URL',
|
||||
default=PyPI.pypi_url,
|
||||
help='Base URL of Python Package Index (default %default)')
|
||||
|
||||
self.parser.insert_option_group(0, self.cmd_opts)
|
||||
|
||||
def run(self, options, args):
|
||||
if not args:
|
||||
raise CommandError('Missing required argument (search query).')
|
||||
query = args
|
||||
pypi_hits = self.search(query, options)
|
||||
hits = transform_hits(pypi_hits)
|
||||
|
||||
terminal_width = None
|
||||
if sys.stdout.isatty():
|
||||
terminal_width = get_terminal_size()[0]
|
||||
|
||||
print_results(hits, terminal_width=terminal_width)
|
||||
if pypi_hits:
|
||||
return SUCCESS
|
||||
return NO_MATCHES_FOUND
|
||||
|
||||
def search(self, query, options):
|
||||
index_url = options.index
|
||||
with self._build_session(options) as session:
|
||||
transport = PipXmlrpcTransport(index_url, session)
|
||||
pypi = xmlrpc_client.ServerProxy(index_url, transport)
|
||||
hits = pypi.search({'name': query, 'summary': query}, 'or')
|
||||
return hits
|
||||
|
||||
|
||||
def transform_hits(hits):
|
||||
"""
|
||||
The list from pypi is really a list of versions. We want a list of
|
||||
packages with the list of versions stored inline. This converts the
|
||||
list from pypi into one we can use.
|
||||
"""
|
||||
packages = OrderedDict()
|
||||
for hit in hits:
|
||||
name = hit['name']
|
||||
summary = hit['summary']
|
||||
version = hit['version']
|
||||
|
||||
if name not in packages.keys():
|
||||
packages[name] = {
|
||||
'name': name,
|
||||
'summary': summary,
|
||||
'versions': [version],
|
||||
}
|
||||
else:
|
||||
packages[name]['versions'].append(version)
|
||||
|
||||
# if this is the highest version, replace summary and score
|
||||
if version == highest_version(packages[name]['versions']):
|
||||
packages[name]['summary'] = summary
|
||||
|
||||
return list(packages.values())
|
||||
|
||||
|
||||
def print_results(hits, name_column_width=None, terminal_width=None):
|
||||
if not hits:
|
||||
return
|
||||
if name_column_width is None:
|
||||
name_column_width = max([
|
||||
len(hit['name']) + len(hit.get('versions', ['-'])[-1])
|
||||
for hit in hits
|
||||
]) + 4
|
||||
|
||||
installed_packages = [p.project_name for p in pkg_resources.working_set]
|
||||
for hit in hits:
|
||||
name = hit['name']
|
||||
summary = hit['summary'] or ''
|
||||
version = hit.get('versions', ['-'])[-1]
|
||||
if terminal_width is not None:
|
||||
target_width = terminal_width - name_column_width - 5
|
||||
if target_width > 10:
|
||||
# wrap and indent summary to fit terminal
|
||||
summary = textwrap.wrap(summary, target_width)
|
||||
summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
|
||||
|
||||
line = '%-*s - %s' % (name_column_width,
|
||||
'%s (%s)' % (name, version), summary)
|
||||
try:
|
||||
logger.info(line)
|
||||
if name in installed_packages:
|
||||
dist = pkg_resources.get_distribution(name)
|
||||
with indent_log():
|
||||
latest = highest_version(hit['versions'])
|
||||
if dist.version == latest:
|
||||
logger.info('INSTALLED: %s (latest)', dist.version)
|
||||
else:
|
||||
logger.info('INSTALLED: %s', dist.version)
|
||||
logger.info('LATEST: %s', latest)
|
||||
except UnicodeEncodeError:
|
||||
pass
|
||||
|
||||
|
||||
def highest_version(versions):
|
||||
return max(versions, key=parse_version)
|
||||
154
v1/flask/lib/python3.4/site-packages/pip/commands/show.py
Normal file
154
v1/flask/lib/python3.4/site-packages/pip/commands/show.py
Normal file
@@ -0,0 +1,154 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from email.parser import FeedParser
|
||||
import logging
|
||||
import os
|
||||
|
||||
from pip.basecommand import Command
|
||||
from pip.status_codes import SUCCESS, ERROR
|
||||
from pip._vendor import pkg_resources
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ShowCommand(Command):
|
||||
"""Show information about one or more installed packages."""
|
||||
name = 'show'
|
||||
usage = """
|
||||
%prog [options] <package> ..."""
|
||||
summary = 'Show information about installed packages.'
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
super(ShowCommand, self).__init__(*args, **kw)
|
||||
self.cmd_opts.add_option(
|
||||
'-f', '--files',
|
||||
dest='files',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Show the full list of installed files for each package.')
|
||||
|
||||
self.parser.insert_option_group(0, self.cmd_opts)
|
||||
|
||||
def run(self, options, args):
|
||||
if not args:
|
||||
logger.warning('ERROR: Please provide a package name or names.')
|
||||
return ERROR
|
||||
query = args
|
||||
|
||||
results = search_packages_info(query)
|
||||
if not print_results(
|
||||
results, list_files=options.files, verbose=options.verbose):
|
||||
return ERROR
|
||||
return SUCCESS
|
||||
|
||||
|
||||
def search_packages_info(query):
|
||||
"""
|
||||
Gather details from installed distributions. Print distribution name,
|
||||
version, location, and installed files. Installed files requires a
|
||||
pip generated 'installed-files.txt' in the distributions '.egg-info'
|
||||
directory.
|
||||
"""
|
||||
installed = {}
|
||||
for p in pkg_resources.working_set:
|
||||
installed[canonicalize_name(p.project_name)] = p
|
||||
|
||||
query_names = [canonicalize_name(name) for name in query]
|
||||
|
||||
for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
|
||||
package = {
|
||||
'name': dist.project_name,
|
||||
'version': dist.version,
|
||||
'location': dist.location,
|
||||
'requires': [dep.project_name for dep in dist.requires()],
|
||||
}
|
||||
file_list = None
|
||||
metadata = None
|
||||
if isinstance(dist, pkg_resources.DistInfoDistribution):
|
||||
# RECORDs should be part of .dist-info metadatas
|
||||
if dist.has_metadata('RECORD'):
|
||||
lines = dist.get_metadata_lines('RECORD')
|
||||
paths = [l.split(',')[0] for l in lines]
|
||||
paths = [os.path.join(dist.location, p) for p in paths]
|
||||
file_list = [os.path.relpath(p, dist.location) for p in paths]
|
||||
|
||||
if dist.has_metadata('METADATA'):
|
||||
metadata = dist.get_metadata('METADATA')
|
||||
else:
|
||||
# Otherwise use pip's log for .egg-info's
|
||||
if dist.has_metadata('installed-files.txt'):
|
||||
paths = dist.get_metadata_lines('installed-files.txt')
|
||||
paths = [os.path.join(dist.egg_info, p) for p in paths]
|
||||
file_list = [os.path.relpath(p, dist.location) for p in paths]
|
||||
|
||||
if dist.has_metadata('PKG-INFO'):
|
||||
metadata = dist.get_metadata('PKG-INFO')
|
||||
|
||||
if dist.has_metadata('entry_points.txt'):
|
||||
entry_points = dist.get_metadata_lines('entry_points.txt')
|
||||
package['entry_points'] = entry_points
|
||||
|
||||
if dist.has_metadata('INSTALLER'):
|
||||
for line in dist.get_metadata_lines('INSTALLER'):
|
||||
if line.strip():
|
||||
package['installer'] = line.strip()
|
||||
break
|
||||
|
||||
# @todo: Should pkg_resources.Distribution have a
|
||||
# `get_pkg_info` method?
|
||||
feed_parser = FeedParser()
|
||||
feed_parser.feed(metadata)
|
||||
pkg_info_dict = feed_parser.close()
|
||||
for key in ('metadata-version', 'summary',
|
||||
'home-page', 'author', 'author-email', 'license'):
|
||||
package[key] = pkg_info_dict.get(key)
|
||||
|
||||
# It looks like FeedParser cannot deal with repeated headers
|
||||
classifiers = []
|
||||
for line in metadata.splitlines():
|
||||
if line.startswith('Classifier: '):
|
||||
classifiers.append(line[len('Classifier: '):])
|
||||
package['classifiers'] = classifiers
|
||||
|
||||
if file_list:
|
||||
package['files'] = sorted(file_list)
|
||||
yield package
|
||||
|
||||
|
||||
def print_results(distributions, list_files=False, verbose=False):
    """
    Print the information from installed distributions found.
    """
    results_printed = False
    for i, dist in enumerate(distributions):
        results_printed = True
        if i > 0:
            logger.info("---")
        logger.info("Name: %s", dist.get('name', ''))
        logger.info("Version: %s", dist.get('version', ''))
        logger.info("Summary: %s", dist.get('summary', ''))
        logger.info("Home-page: %s", dist.get('home-page', ''))
        logger.info("Author: %s", dist.get('author', ''))
        logger.info("Author-email: %s", dist.get('author-email', ''))
        logger.info("License: %s", dist.get('license', ''))
        logger.info("Location: %s", dist.get('location', ''))
        logger.info("Requires: %s", ', '.join(dist.get('requires', [])))
        if verbose:
            logger.info("Metadata-Version: %s",
                        dist.get('metadata-version', ''))
            logger.info("Installer: %s", dist.get('installer', ''))
            logger.info("Classifiers:")
            for classifier in dist.get('classifiers', []):
                logger.info("  %s", classifier)
            logger.info("Entry-points:")
            for entry in dist.get('entry_points', []):
                logger.info("  %s", entry.strip())
        if list_files:
            logger.info("Files:")
            for line in dist.get('files', []):
                logger.info("  %s", line.strip())
            if "files" not in dist:
                logger.info("Cannot locate installed-files.txt")
    return results_printed
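The for-loop at the start of this hunk is the body of pip's search_packages_info generator (commands/show.py in this vendored pip 9.x), and print_results above consumes the dicts it yields. A minimal sketch of wiring the two together, assuming this vendored pip is importable; the package names queried are only examples:

import logging

from pip.commands.show import print_results, search_packages_info

# Make pip's logger output visible on the console for this sketch.
logging.basicConfig(level=logging.INFO)

# search_packages_info yields one metadata dict per installed distribution
# that matches a query name; print_results logs each dict through pip's
# logger and returns True only if at least one package was found.
results = search_packages_info(['flask', 'itsdangerous'])
if not print_results(results, list_files=False, verbose=True):
    print('No matching packages are installed')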
@@ -0,0 +1,76 @@
from __future__ import absolute_import

import pip
from pip.wheel import WheelCache
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.basecommand import Command
from pip.exceptions import InstallationError


class UninstallCommand(Command):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """
    name = 'uninstall'
    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""
    summary = 'Uninstall packages.'

    def __init__(self, *args, **kw):
        super(UninstallCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help='Uninstall all the packages listed in the given requirements '
                 'file. This option can be used multiple times.',
        )
        self.cmd_opts.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        with self._build_session(options) as session:
            format_control = pip.index.FormatControl(set(), set())
            wheel_cache = WheelCache(options.cache_dir, format_control)
            requirement_set = RequirementSet(
                build_dir=None,
                src_dir=None,
                download_dir=None,
                isolated=options.isolated_mode,
                session=session,
                wheel_cache=wheel_cache,
            )
            for name in args:
                requirement_set.add_requirement(
                    InstallRequirement.from_line(
                        name, isolated=options.isolated_mode,
                        wheel_cache=wheel_cache
                    )
                )
            for filename in options.requirements:
                for req in parse_requirements(
                        filename,
                        options=options,
                        session=session,
                        wheel_cache=wheel_cache):
                    requirement_set.add_requirement(req)
            if not requirement_set.has_requirements:
                raise InstallationError(
                    'You must give at least one requirement to %(name)s (see '
                    '"pip help %(name)s")' % dict(name=self.name)
                )
            requirement_set.uninstall(auto_confirm=options.yes)
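For reference, a hedged sketch of driving the UninstallCommand above through pip's top-level entry point rather than instantiating it directly; pip.main() exists in this pip 9.x vintage (it was removed in later releases), and 'itsdangerous' is only a placeholder package name:

import pip

# Equivalent to running `pip uninstall -y itsdangerous` on the command line.
# '-y' maps to the --yes option registered in __init__ above, so no
# confirmation prompt is shown; the return value is 0 on success.
status = pip.main(['uninstall', '-y', 'itsdangerous'])
print('uninstall exit status:', status)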
208
v1/flask/lib/python3.4/site-packages/pip/commands/wheel.py
Normal file
@@ -0,0 +1,208 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import

import logging
import os
import warnings

from pip.basecommand import RequirementCommand
from pip.exceptions import CommandError, PreviousBuildDirError
from pip.req import RequirementSet
from pip.utils import import_or_raise
from pip.utils.build import BuildDirectory
from pip.utils.deprecation import RemovedInPip10Warning
from pip.wheel import WheelCache, WheelBuilder
from pip import cmdoptions


logger = logging.getLogger(__name__)


class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.

    """

    name = 'wheel'
    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Build wheels from your requirements.'

    def __init__(self, *args, **kw):
        super(WheelCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=os.curdir,
            help=("Build wheels into <dir>, where the default is the "
                  "current working directory."),
        )
        cmd_opts.add_option(cmdoptions.use_wheel())
        cmd_opts.add_option(cmdoptions.no_use_wheel())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(
            '--build-option',
            dest='build_options',
            metavar='options',
            action='append',
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.")
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.build_dir())

        cmd_opts.add_option(
            '--global-option',
            dest='global_options',
            action='append',
            metavar='options',
            help="Extra global options to be supplied to the setup.py "
                 "call before the 'bdist_wheel' command.")

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def check_required_packages(self):
        import_or_raise(
            'wheel.bdist_wheel',
            CommandError,
            "'pip wheel' requires the 'wheel' package. To fix this, run: "
            "pip install wheel"
        )
        pkg_resources = import_or_raise(
            'pkg_resources',
            CommandError,
            "'pip wheel' requires setuptools >= 0.8 for dist-info support."
            " To fix this, run: pip install --upgrade setuptools"
        )
        if not hasattr(pkg_resources, 'DistInfoDistribution'):
            raise CommandError(
                "'pip wheel' requires setuptools >= 0.8 for dist-info "
                "support. To fix this, run: pip install --upgrade "
                "setuptools"
            )

    def run(self, options, args):
        self.check_required_packages()
        cmdoptions.resolve_wheel_no_use_binary(options)
        cmdoptions.check_install_build_global(options)

        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)
            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:
                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=None,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_installed=True,
                    ignore_requires_python=options.ignore_requires_python,
                    isolated=options.isolated_mode,
                    session=session,
                    wheel_cache=wheel_cache,
                    wheel_download_dir=options.wheel_dir,
                    require_hashes=options.require_hashes
                )

                self.populate_requirement_set(
                    requirement_set, args, options, finder, session, self.name,
                    wheel_cache
                )

                if not requirement_set.has_requirements:
                    return

                try:
                    # build wheels
                    wb = WheelBuilder(
                        requirement_set,
                        finder,
                        build_options=options.build_options or [],
                        global_options=options.global_options or [],
                    )
                    if not wb.build():
                        raise CommandError(
                            "Failed to build one or more wheels"
                        )
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    if not options.no_clean:
                        requirement_set.cleanup_files()
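Similarly, a hedged sketch of exercising the WheelCommand above via the same pip 9.x pip.main() entry point; the ./wheelhouse directory and the 'flask' requirement are illustrative only, and the wheel package must be installed, as check_required_packages() enforces:

import pip

# Equivalent to `pip wheel --wheel-dir ./wheelhouse flask`: resolves flask
# and its dependencies, then builds a .whl for each into ./wheelhouse.
status = pip.main(['wheel', '--wheel-dir', './wheelhouse', 'flask'])
print('wheel exit status:', status)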