Mirror of https://github.com/RaidMax/IW4M-Admin.git, synced 2025-07-04 10:59:27 -05:00
Add server version to master API
Add IsEvadedOffense to EFPenalty; fix remote log reading on non-Windows
10
Master/env_master/Lib/site-packages/pip/req/__init__.py
Normal file
@@ -0,0 +1,10 @@
from __future__ import absolute_import

from .req_install import InstallRequirement
from .req_set import RequirementSet, Requirements
from .req_file import parse_requirements


__all__ = [
    "RequirementSet", "Requirements", "InstallRequirement",
    "parse_requirements",
]
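The __init__ module above only re-exports the package's public names. A one-line usage sketch of the resulting import path (illustrative, not part of this commit):

    # These names resolve to pip.req.req_set, pip.req.req_install and
    # pip.req.req_file respectively, per the re-exports above.
    from pip.req import InstallRequirement, RequirementSet, parse_requirements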
342
Master/env_master/Lib/site-packages/pip/req/req_file.py
Normal file
@@ -0,0 +1,342 @@
"""
Requirements file parsing
"""

from __future__ import absolute_import

import os
import re
import shlex
import sys
import optparse
import warnings

from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves import filterfalse

import pip
from pip.download import get_file_content
from pip.req.req_install import InstallRequirement
from pip.exceptions import (RequirementsFileParseError)
from pip.utils.deprecation import RemovedInPip10Warning
from pip import cmdoptions

__all__ = ['parse_requirements']

SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
COMMENT_RE = re.compile(r'(^|\s)+#.*$')

SUPPORTED_OPTIONS = [
    cmdoptions.constraints,
    cmdoptions.editable,
    cmdoptions.requirements,
    cmdoptions.no_index,
    cmdoptions.index_url,
    cmdoptions.find_links,
    cmdoptions.extra_index_url,
    cmdoptions.allow_external,
    cmdoptions.allow_all_external,
    cmdoptions.no_allow_external,
    cmdoptions.allow_unsafe,
    cmdoptions.no_allow_unsafe,
    cmdoptions.use_wheel,
    cmdoptions.no_use_wheel,
    cmdoptions.always_unzip,
    cmdoptions.no_binary,
    cmdoptions.only_binary,
    cmdoptions.pre,
    cmdoptions.process_dependency_links,
    cmdoptions.trusted_host,
    cmdoptions.require_hashes,
]

# options to be passed to requirements
SUPPORTED_OPTIONS_REQ = [
    cmdoptions.install_options,
    cmdoptions.global_options,
    cmdoptions.hash,
]

# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]


def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, constraint=False, wheel_cache=None):
    """Parse a requirements file and yield InstallRequirement instances.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: cli options.
    :param session: Instance of pip.download.PipSession.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )

    lines_enum = preprocess(content, options)

    for line_number, line in lines_enum:
        req_iter = process_line(line, filename, line_number, finder,
                                comes_from, options, session, wheel_cache,
                                constraint=constraint)
        for req in req_iter:
            yield req


def preprocess(content, options):
    """Split, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    :param options: cli options
    """
    lines_enum = enumerate(content.splitlines(), start=1)
    lines_enum = join_lines(lines_enum)
    lines_enum = ignore_comments(lines_enum)
    lines_enum = skip_regex(lines_enum, options)
    return lines_enum


def process_line(line, filename, line_number, finder=None, comes_from=None,
                 options=None, session=None, wheel_cache=None,
                 constraint=False):
    """Process a single requirements line; this can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.

    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    """
    parser = build_parser()
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        # `finder.format_control` will be updated during parsing
        defaults.format_control = finder.format_control
    args_str, options_str = break_args_options(line)
    if sys.version_info < (2, 7, 3):
        # Prior to 2.7.3, shlex cannot deal with unicode entries
        options_str = options_str.encode('utf8')
    opts, _ = parser.parse_args(shlex.split(options_str), defaults)

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number)

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield InstallRequirement.from_line(
            args_str, line_comes_from, constraint=constraint,
            isolated=isolated, options=req_options, wheel_cache=wheel_cache
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        default_vcs = options.default_vcs if options else None
        yield InstallRequirement.from_editable(
            opts.editables[0], comes_from=line_comes_from,
            constraint=constraint, default_vcs=default_vcs, isolated=isolated,
            wheel_cache=wheel_cache
        )

    # parse a nested requirements file
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parser = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parser:
            yield req

    # percolate hash-checking option upward
    elif opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options
    elif finder:
        if opts.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.use_wheel is False:
            finder.use_wheel = False
            pip.index.fmt_ctl_no_use_wheel(finder.format_control)
        if opts.no_index is True:
            finder.index_urls = []
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
        if opts.pre:
            finder.allow_all_prereleases = True
        if opts.process_dependency_links:
            finder.process_dependency_links = True
        if opts.trusted_hosts:
            finder.secure_origins.extend(
                ("*", host, "*") for host in opts.trusted_hosts)


def break_args_options(line):
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        if token.startswith('-') or token.startswith('--'):
            break
        else:
            args.append(token)
            options.pop(0)
    return ' '.join(args), ' '.join(options)


def build_parser():
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)

    option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
    for option_factory in option_factories:
        option = option_factory()
        parser.add_option(option)

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self, msg):
        raise RequirementsFileParseError(msg)
    parser.exit = parser_exit

    return parser


def join_lines(lines_enum):
    """Joins a line ending in '\' with the previous line (except when following
    comments). The joined line takes on the index of the first line.
    """
    primary_line_number = None
    new_line = []
    for line_number, line in lines_enum:
        if not line.endswith('\\') or COMMENT_RE.match(line):
            if COMMENT_RE.match(line):
                # this ensures comments are always matched later
                line = ' ' + line
            if new_line:
                new_line.append(line)
                yield primary_line_number, ''.join(new_line)
                new_line = []
            else:
                yield line_number, line
        else:
            if not new_line:
                primary_line_number = line_number
            new_line.append(line.strip('\\'))

    # last line contains \
    if new_line:
        yield primary_line_number, ''.join(new_line)

    # TODO: handle space after '\'.


def ignore_comments(lines_enum):
    """
    Strips comments and filters empty lines.
    """
    for line_number, line in lines_enum:
        line = COMMENT_RE.sub('', line)
        line = line.strip()
        if line:
            yield line_number, line


def skip_regex(lines_enum, options):
    """
    Skip lines that match '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    """
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        pattern = re.compile(skip_regex)
        lines_enum = filterfalse(
            lambda e: pattern.search(e[1]),
            lines_enum)
    return lines_enum
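For context, parse_requirements above is the entry point pip uses for -r files: preprocess() joins continuation lines, strips comments, and applies --skip-requirements-regex, and process_line() then turns each surviving line into requirements or finder updates. A minimal usage sketch against this pip 9.x-era vendored copy (the file name is illustrative):

    from pip.download import PipSession
    from pip.req.req_file import parse_requirements

    # A session is mandatory; parse_requirements() raises TypeError without one.
    session = PipSession()

    # Yields one InstallRequirement per effective line, after line joining,
    # comment stripping, and regex filtering.
    for req in parse_requirements('requirements.txt', session=session):
        print(req.name, req.req)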
1204
Master/env_master/Lib/site-packages/pip/req/req_install.py
Normal file
File diff suppressed because it is too large
798
Master/env_master/Lib/site-packages/pip/req/req_set.py
Normal file
@@ -0,0 +1,798 @@
from __future__ import absolute_import

from collections import defaultdict
from itertools import chain
import logging
import os

from pip._vendor import pkg_resources
from pip._vendor import requests

from pip.compat import expanduser
from pip.download import (is_file_url, is_dir_url, is_vcs_url, url_to_path,
                          unpack_url)
from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled,
                            DistributionNotFound, PreviousBuildDirError,
                            HashError, HashErrors, HashUnpinned,
                            DirectoryUrlHashUnsupported, VcsHashUnsupported,
                            UnsupportedPythonVersion)
from pip.req.req_install import InstallRequirement
from pip.utils import (
    display_path, dist_in_usersite, ensure_dir, normalize_path)
from pip.utils.hashes import MissingHashes
from pip.utils.logging import indent_log
from pip.utils.packaging import check_dist_requires_python
from pip.vcs import vcs
from pip.wheel import Wheel

logger = logging.getLogger(__name__)


class Requirements(object):

    def __init__(self):
        self._keys = []
        self._dict = {}

    def keys(self):
        return self._keys

    def values(self):
        return [self._dict[key] for key in self._keys]

    def __contains__(self, item):
        return item in self._keys

    def __setitem__(self, key, value):
        if key not in self._keys:
            self._keys.append(key)
        self._dict[key] = value

    def __getitem__(self, key):
        return self._dict[key]

    def __repr__(self):
        values = ['%s: %s' % (repr(k), repr(self[k])) for k in self.keys()]
        return 'Requirements({%s})' % ', '.join(values)


class DistAbstraction(object):
    """Abstracts out the wheel vs non-wheel prepare_files logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    """

    def __init__(self, req_to_install):
        self.req_to_install = req_to_install

    def dist(self, finder):
        """Return a setuptools Dist object."""
        raise NotImplementedError(self.dist)

    def prep_for_dist(self):
        """Ensure that we can get a Dist for this requirement."""
        raise NotImplementedError(self.dist)


def make_abstract_dist(req_to_install):
    """Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    """
    if req_to_install.editable:
        return IsSDist(req_to_install)
    elif req_to_install.link and req_to_install.link.is_wheel:
        return IsWheel(req_to_install)
    else:
        return IsSDist(req_to_install)


class IsWheel(DistAbstraction):

    def dist(self, finder):
        return list(pkg_resources.find_distributions(
            self.req_to_install.source_dir))[0]

    def prep_for_dist(self):
        # FIXME:https://github.com/pypa/pip/issues/1112
        pass


class IsSDist(DistAbstraction):

    def dist(self, finder):
        dist = self.req_to_install.get_dist()
        # FIXME: shouldn't be globally added:
        if dist.has_metadata('dependency_links.txt'):
            finder.add_dependency_links(
                dist.get_metadata_lines('dependency_links.txt')
            )
        return dist

    def prep_for_dist(self):
        self.req_to_install.run_egg_info()
        self.req_to_install.assert_source_matches_version()


class Installed(DistAbstraction):

    def dist(self, finder):
        return self.req_to_install.satisfied_by

    def prep_for_dist(self):
        pass


class RequirementSet(object):

    def __init__(self, build_dir, src_dir, download_dir, upgrade=False,
                 upgrade_strategy=None, ignore_installed=False, as_egg=False,
                 target_dir=None, ignore_dependencies=False,
                 force_reinstall=False, use_user_site=False, session=None,
                 pycompile=True, isolated=False, wheel_download_dir=None,
                 wheel_cache=None, require_hashes=False,
                 ignore_requires_python=False):
        """Create a RequirementSet.

        :param wheel_download_dir: Where still-packed .whl files should be
            written to. If None they are written to the download_dir parameter.
            Separate to download_dir to permit only keeping wheel archives for
            pip wheel.
        :param download_dir: Where still packed archives should be written to.
            If None they are not saved, and are deleted immediately after
            unpacking.
        :param wheel_cache: The pip wheel cache, for passing to
            InstallRequirement.
        """
        if session is None:
            raise TypeError(
                "RequirementSet() missing 1 required keyword argument: "
                "'session'"
            )

        self.build_dir = build_dir
        self.src_dir = src_dir
        # XXX: download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.
        self.download_dir = download_dir
        self.upgrade = upgrade
        self.upgrade_strategy = upgrade_strategy
        self.ignore_installed = ignore_installed
        self.force_reinstall = force_reinstall
        self.requirements = Requirements()
        # Mapping of alias: real_name
        self.requirement_aliases = {}
        self.unnamed_requirements = []
        self.ignore_dependencies = ignore_dependencies
        self.ignore_requires_python = ignore_requires_python
        self.successfully_downloaded = []
        self.successfully_installed = []
        self.reqs_to_cleanup = []
        self.as_egg = as_egg
        self.use_user_site = use_user_site
        self.target_dir = target_dir  # set from --target option
        self.session = session
        self.pycompile = pycompile
        self.isolated = isolated
        if wheel_download_dir:
            wheel_download_dir = normalize_path(wheel_download_dir)
        self.wheel_download_dir = wheel_download_dir
        self._wheel_cache = wheel_cache
        self.require_hashes = require_hashes
        # Maps from install_req -> dependencies_of_install_req
        self._dependencies = defaultdict(list)

    def __str__(self):
        reqs = [req for req in self.requirements.values()
                if not req.comes_from]
        reqs.sort(key=lambda req: req.name.lower())
        return ' '.join([str(req.req) for req in reqs])

    def __repr__(self):
        reqs = [req for req in self.requirements.values()]
        reqs.sort(key=lambda req: req.name.lower())
        reqs_str = ', '.join([str(req.req) for req in reqs])
        return ('<%s object; %d requirement(s): %s>'
                % (self.__class__.__name__, len(reqs), reqs_str))

    def add_requirement(self, install_req, parent_req_name=None,
                        extras_requested=None):
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        name = install_req.name
        if not install_req.match_markers(extras_requested):
            logger.warning("Ignoring %s: markers '%s' don't match your "
                           "environment", install_req.name,
                           install_req.markers)
            return []

        # This check has to come after we filter requirements with the
        # environment markers.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            if not wheel.supported():
                raise InstallationError(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )

        install_req.as_egg = self.as_egg
        install_req.use_user_site = self.use_user_site
        install_req.target_dir = self.target_dir
        install_req.pycompile = self.pycompile
        install_req.is_direct = (parent_req_name is None)

        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req]
        else:
            try:
                existing_req = self.get_requirement(name)
            except KeyError:
                existing_req = None
            if (parent_req_name is None and existing_req and not
                    existing_req.constraint and
                    existing_req.extras == install_req.extras and not
                    existing_req.req.specifier == install_req.req.specifier):
                raise InstallationError(
                    'Double requirement given: %s (already in %s, name=%r)'
                    % (install_req, existing_req, name))
            if not existing_req:
                # Add requirement
                self.requirements[name] = install_req
                # FIXME: what about other normalizations? E.g., _ vs. -?
                if name.lower() != name:
                    self.requirement_aliases[name.lower()] = name
                result = [install_req]
            else:
                # Assume there's no need to scan, and that we've already
                # encountered this for scanning.
                result = []
                if not install_req.constraint and existing_req.constraint:
                    if (install_req.link and not (existing_req.link and
                            install_req.link.path == existing_req.link.path)):
                        self.reqs_to_cleanup.append(install_req)
                        raise InstallationError(
                            "Could not satisfy constraints for '%s': "
                            "installation from path or url cannot be "
                            "constrained to a version" % name)
                    # If we're now installing a constraint, mark the existing
                    # object for real installation.
                    existing_req.constraint = False
                    existing_req.extras = tuple(
                        sorted(set(existing_req.extras).union(
                            set(install_req.extras))))
                    logger.debug("Setting %s extras to: %s",
                                 existing_req, existing_req.extras)
                    # And now we need to scan this.
                    result = [existing_req]
                # Canonicalise to the already-added object for the backref
                # check below.
                install_req = existing_req
            if parent_req_name:
                parent_req = self.get_requirement(parent_req_name)
                self._dependencies[parent_req].append(install_req)
            return result

    def has_requirement(self, project_name):
        name = project_name.lower()
        if (name in self.requirements and
                not self.requirements[name].constraint or
                name in self.requirement_aliases and
                not self.requirements[self.requirement_aliases[name]].constraint):
            return True
        return False

    @property
    def has_requirements(self):
        return list(req for req in self.requirements.values() if not
                    req.constraint) or self.unnamed_requirements

    @property
    def is_download(self):
        if self.download_dir:
            self.download_dir = expanduser(self.download_dir)
            if os.path.exists(self.download_dir):
                return True
            else:
                logger.critical('Could not find download directory')
                raise InstallationError(
                    "Could not find or access download directory '%s'"
                    % display_path(self.download_dir))
        return False

    def get_requirement(self, project_name):
        for name in project_name, project_name.lower():
            if name in self.requirements:
                return self.requirements[name]
            if name in self.requirement_aliases:
                return self.requirements[self.requirement_aliases[name]]
        raise KeyError("No project with the name %r" % project_name)

    def uninstall(self, auto_confirm=False):
        for req in self.requirements.values():
            if req.constraint:
                continue
            req.uninstall(auto_confirm=auto_confirm)
            req.commit_uninstall()

    def prepare_files(self, finder):
        """
        Prepare process. Create temp directories, download and/or unpack files.
        """
        # make the wheelhouse
        if self.wheel_download_dir:
            ensure_dir(self.wheel_download_dir)

        # If any top-level requirement has a hash specified, enter
        # hash-checking mode, which requires hashes from all.
        root_reqs = self.unnamed_requirements + self.requirements.values()
        require_hashes = (self.require_hashes or
                          any(req.has_hash_options for req in root_reqs))
        if require_hashes and self.as_egg:
            raise InstallationError(
                '--egg is not allowed with --require-hashes mode, since it '
                'delegates dependency resolution to setuptools and could thus '
                'result in installation of unhashed packages.')

        # Actually prepare the files, and collect any exceptions. Most hash
        # exceptions cannot be checked ahead of time, because
        # req.populate_link() needs to be called before we can make decisions
        # based on link type.
        discovered_reqs = []
        hash_errors = HashErrors()
        for req in chain(root_reqs, discovered_reqs):
            try:
                discovered_reqs.extend(self._prepare_file(
                    finder,
                    req,
                    require_hashes=require_hashes,
                    ignore_dependencies=self.ignore_dependencies))
            except HashError as exc:
                exc.req = req
                hash_errors.append(exc)

        if hash_errors:
            raise hash_errors

    def _is_upgrade_allowed(self, req):
        return self.upgrade and (
            self.upgrade_strategy == "eager" or (
                self.upgrade_strategy == "only-if-needed" and req.is_direct
            )
        )

    def _check_skip_installed(self, req_to_install, finder):
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        """
        # Check whether to upgrade/reinstall this req or not.
        req_to_install.check_if_exists()
        if req_to_install.satisfied_by:
            upgrade_allowed = self._is_upgrade_allowed(req_to_install)

            # Is the best version already installed?
            best_installed = False

            if upgrade_allowed:
                # For link based requirements we have to pull the
                # tree down and inspect to assess the version #, so
                # its handled way down.
                if not (self.force_reinstall or req_to_install.link):
                    try:
                        finder.find_requirement(
                            req_to_install, upgrade_allowed)
                    except BestVersionAlreadyInstalled:
                        best_installed = True
                    except DistributionNotFound:
                        # No distribution found, so we squash the
                        # error - it will be raised later when we
                        # re-try later to do the install.
                        # Why don't we just raise here?
                        pass

                if not best_installed:
                    # don't uninstall conflict if user install and
                    # conflict is not user install
                    if not (self.use_user_site and not
                            dist_in_usersite(req_to_install.satisfied_by)):
                        req_to_install.conflicts_with = \
                            req_to_install.satisfied_by
                    req_to_install.satisfied_by = None

            # Figure out a nice message to say why we're skipping this.
            if best_installed:
                skip_reason = 'already up-to-date'
            elif self.upgrade_strategy == "only-if-needed":
                skip_reason = 'not upgraded as not directly required'
            else:
                skip_reason = 'already satisfied'

            return skip_reason
        else:
            return None

    def _prepare_file(self,
                      finder,
                      req_to_install,
                      require_hashes=False,
                      ignore_dependencies=False):
        """Prepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        # ###################### #
        # # print log messages # #
        # ###################### #
        if req_to_install.editable:
            logger.info('Obtaining %s', req_to_install)
        else:
            # satisfied_by is only evaluated by calling _check_skip_installed,
            # so it must be None here.
            assert req_to_install.satisfied_by is None
            if not self.ignore_installed:
                skip_reason = self._check_skip_installed(
                    req_to_install, finder)

            if req_to_install.satisfied_by:
                assert skip_reason is not None, (
                    '_check_skip_installed returned None but '
                    'req_to_install.satisfied_by is set to %r'
                    % (req_to_install.satisfied_by,))
                logger.info(
                    'Requirement %s: %s', skip_reason,
                    req_to_install)
            else:
                if (req_to_install.link and
                        req_to_install.link.scheme == 'file'):
                    path = url_to_path(req_to_install.link.url)
                    logger.info('Processing %s', display_path(path))
                else:
                    logger.info('Collecting %s', req_to_install)

        with indent_log():
            # ################################ #
            # # vcs update or unpack archive # #
            # ################################ #
            if req_to_install.editable:
                if require_hashes:
                    raise InstallationError(
                        'The editable requirement %s cannot be installed when '
                        'requiring hashes, because there is no single file to '
                        'hash.' % req_to_install)
                req_to_install.ensure_has_source_dir(self.src_dir)
                req_to_install.update_editable(not self.is_download)
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    req_to_install.archive(self.download_dir)
                req_to_install.check_if_exists()
            elif req_to_install.satisfied_by:
                if require_hashes:
                    logger.debug(
                        'Since it is already installed, we are trusting this '
                        'package without checking its hash. To ensure a '
                        'completely repeatable environment, install into an '
                        'empty virtualenv.')
                abstract_dist = Installed(req_to_install)
            else:
                # @@ if filesystem packages are not marked
                # editable in a req, a non deterministic error
                # occurs when the script attempts to unpack the
                # build directory
                req_to_install.ensure_has_source_dir(self.build_dir)
                # If a checkout exists, it's unwise to keep going. version
                # inconsistencies are logged later, but do not fail the
                # installation.
                # FIXME: this won't upgrade when there's an existing
                # package unpacked in `req_to_install.source_dir`
                if os.path.exists(
                        os.path.join(req_to_install.source_dir, 'setup.py')):
                    raise PreviousBuildDirError(
                        "pip can't proceed with requirements '%s' due to a"
                        " pre-existing build directory (%s). This is "
                        "likely due to a previous installation that failed"
                        ". pip is being responsible and not assuming it "
                        "can delete this. Please delete it and try again."
                        % (req_to_install, req_to_install.source_dir)
                    )
                req_to_install.populate_link(
                    finder,
                    self._is_upgrade_allowed(req_to_install),
                    require_hashes
                )
                # We can't hit this spot and have populate_link return None.
                # req_to_install.satisfied_by is None here (because we're
                # guarded) and upgrade has no impact except when satisfied_by
                # is not None.
                # Then inside find_requirement existing_applicable -> False
                # If no new versions are found, DistributionNotFound is raised,
                # otherwise a result is guaranteed.
                assert req_to_install.link
                link = req_to_install.link

                # Now that we have the real link, we can tell what kind of
                # requirements we have and raise some more informative errors
                # than otherwise. (For example, we can raise VcsHashUnsupported
                # for a VCS URL rather than HashMissing.)
                if require_hashes:
                    # We could check these first 2 conditions inside
                    # unpack_url and save repetition of conditions, but then
                    # we would report less-useful error messages for
                    # unhashable requirements, complaining that there's no
                    # hash provided.
                    if is_vcs_url(link):
                        raise VcsHashUnsupported()
                    elif is_file_url(link) and is_dir_url(link):
                        raise DirectoryUrlHashUnsupported()
                    if (not req_to_install.original_link and
                            not req_to_install.is_pinned):
                        # Unpinned packages are asking for trouble when a new
                        # version is uploaded. This isn't a security check, but
                        # it saves users a surprising hash mismatch in the
                        # future.
                        #
                        # file:/// URLs aren't pinnable, so don't complain
                        # about them not being pinned.
                        raise HashUnpinned()
                hashes = req_to_install.hashes(
                    trust_internet=not require_hashes)
                if require_hashes and not hashes:
                    # Known-good hashes are missing for this requirement, so
                    # shim it with a facade object that will provoke hash
                    # computation and then raise a HashMissing exception
                    # showing the user what the hash should be.
                    hashes = MissingHashes()

                try:
                    download_dir = self.download_dir
                    # We always delete unpacked sdists after pip ran.
                    autodelete_unpacked = True
                    if req_to_install.link.is_wheel \
                            and self.wheel_download_dir:
                        # when doing 'pip wheel' we download wheels to a
                        # dedicated dir.
                        download_dir = self.wheel_download_dir
                    if req_to_install.link.is_wheel:
                        if download_dir:
                            # When downloading, we only unpack wheels to get
                            # metadata.
                            autodelete_unpacked = True
                        else:
                            # When installing a wheel, we use the unpacked
                            # wheel.
                            autodelete_unpacked = False
                    unpack_url(
                        req_to_install.link, req_to_install.source_dir,
                        download_dir, autodelete_unpacked,
                        session=self.session, hashes=hashes)
                except requests.HTTPError as exc:
                    logger.critical(
                        'Could not install requirement %s because '
                        'of error %s',
                        req_to_install,
                        exc,
                    )
                    raise InstallationError(
                        'Could not install requirement %s because '
                        'of HTTP error %s for URL %s' %
                        (req_to_install, exc, req_to_install.link)
                    )
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    # Make a .zip of the source_dir we already created.
                    if req_to_install.link.scheme in vcs.all_schemes:
                        req_to_install.archive(self.download_dir)
                # req_to_install.req is only avail after unpack for URL
                # pkgs repeat check_if_exists to uninstall-on-upgrade
                # (#14)
                if not self.ignore_installed:
                    req_to_install.check_if_exists()
                if req_to_install.satisfied_by:
                    if self.upgrade or self.ignore_installed:
                        # don't uninstall conflict if user install and
                        # conflict is not user install
                        if not (self.use_user_site and not
                                dist_in_usersite(
                                    req_to_install.satisfied_by)):
                            req_to_install.conflicts_with = \
                                req_to_install.satisfied_by
                        req_to_install.satisfied_by = None
                    else:
                        logger.info(
                            'Requirement already satisfied (use '
                            '--upgrade to upgrade): %s',
                            req_to_install,
                        )

            # ###################### #
            # # parse dependencies # #
            # ###################### #
            dist = abstract_dist.dist(finder)
            try:
                check_dist_requires_python(dist)
            except UnsupportedPythonVersion as e:
                if self.ignore_requires_python:
                    logger.warning(e.args[0])
                else:
                    req_to_install.remove_temporary_source()
                    raise
            more_reqs = []

            def add_req(subreq, extras_requested):
                sub_install_req = InstallRequirement(
                    str(subreq),
                    req_to_install,
                    isolated=self.isolated,
                    wheel_cache=self._wheel_cache,
                )
                more_reqs.extend(self.add_requirement(
                    sub_install_req, req_to_install.name,
                    extras_requested=extras_requested))

            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not self.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                self.add_requirement(req_to_install, None)

            if not ignore_dependencies:
                if (req_to_install.extras):
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )

                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq, extras_requested=available_requested)

            # cleanup tmp src
            self.reqs_to_cleanup.append(req_to_install)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                self.successfully_downloaded.append(req_to_install)

        return more_reqs

    def cleanup_files(self):
        """Clean up files, remove builds."""
        logger.debug('Cleaning up...')
        with indent_log():
            for req in self.reqs_to_cleanup:
                req.remove_temporary_source()

    def _to_install(self):
        """Create the installation order.

        The installation order is topological - requirements are installed
        before the requiring thing. We break cycles at an arbitrary point,
        and make no other guarantees.
        """
        # The current implementation, which we may change at any point
        # installs the user specified things in the order given, except when
        # dependencies must come earlier to achieve topological order.
        order = []
        ordered_reqs = set()

        def schedule(req):
            if req.satisfied_by or req in ordered_reqs:
                return
            if req.constraint:
                return
            ordered_reqs.add(req)
            for dep in self._dependencies[req]:
                schedule(dep)
            order.append(req)

        for install_req in self.requirements.values():
            schedule(install_req)
        return order

    def install(self, install_options, global_options=(), *args, **kwargs):
        """
        Install everything in this set (after having downloaded and unpacked
        the packages)
        """
        to_install = self._to_install()

        if to_install:
            logger.info(
                'Installing collected packages: %s',
                ', '.join([req.name for req in to_install]),
            )

        with indent_log():
            for requirement in to_install:
                if requirement.conflicts_with:
                    logger.info(
                        'Found existing installation: %s',
                        requirement.conflicts_with,
                    )
                    with indent_log():
                        requirement.uninstall(auto_confirm=True)
                try:
                    requirement.install(
                        install_options,
                        global_options,
                        *args,
                        **kwargs
                    )
                except:
                    # if install did not succeed, rollback previous uninstall
                    if (requirement.conflicts_with and not
                            requirement.install_succeeded):
                        requirement.rollback_uninstall()
                    raise
                else:
                    if (requirement.conflicts_with and
                            requirement.install_succeeded):
                        requirement.commit_uninstall()
                requirement.remove_temporary_source()

        self.successfully_installed = to_install
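RequirementSet above drives the whole resolve/download/install pipeline: add_requirement() registers top-level requirements, prepare_files() downloads and unpacks them while discovering dependencies, _to_install() produces a topological install order, and install() applies it with uninstall rollback on failure. A hedged sketch of that flow against this vendored pip 9.x copy (directories, index URL, and the pinned package are illustrative; real callers go through pip's install command):

    from pip.download import PipSession
    from pip.index import PackageFinder
    from pip.req.req_install import InstallRequirement
    from pip.req.req_set import RequirementSet

    session = PipSession()
    finder = PackageFinder(find_links=[],
                           index_urls=['https://pypi.org/simple'],
                           session=session)

    req_set = RequirementSet(build_dir='/tmp/build', src_dir='/tmp/src',
                             download_dir=None, session=session)
    req_set.add_requirement(InstallRequirement.from_line('requests==2.12.4'))

    req_set.prepare_files(finder)        # download/unpack, collect dependencies
    req_set.install(install_options=[])  # ordered via _to_install()
    req_set.cleanup_files()              # drop temporary build directories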
195
Master/env_master/Lib/site-packages/pip/req/req_uninstall.py
Normal file
@@ -0,0 +1,195 @@
from __future__ import absolute_import

import logging
import os
import tempfile

from pip.compat import uses_pycache, WINDOWS, cache_from_source
from pip.exceptions import UninstallationError
from pip.utils import rmtree, ask, is_local, renames, normalize_path
from pip.utils.logging import indent_log


logger = logging.getLogger(__name__)


class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        self.paths = set()
        self._refuse = set()
        self.pth = {}
        self.dist = dist
        self.save_dir = None
        self._moved_paths = []

    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def compact(self, paths):
        """Compact a path set to contain the minimal number of paths
        necessary to contain all paths in the set. If /a/path/ and
        /a/path/to/a/file.txt are both in the set, leave only the
        shorter path."""
        short_paths = set()
        for path in sorted(paths, key=len):
            if not any([
                    (path.startswith(shortpath) and
                     path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
                    for shortpath in short_paths]):
                short_paths.add(path)
        return short_paths

    def _stash(self, path):
        return os.path.join(
            self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))

    def remove(self, auto_confirm=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""
        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return
        logger.info(
            'Uninstalling %s-%s:',
            self.dist.project_name, self.dist.version
        )

        with indent_log():
            paths = sorted(self.compact(self.paths))

            if auto_confirm:
                response = 'y'
            else:
                for path in paths:
                    logger.info(path)
                response = ask('Proceed (y/n)? ', ('y', 'n'))
            if self._refuse:
                logger.info('Not removing or modifying (outside of prefix):')
                for path in self.compact(self._refuse):
                    logger.info(path)
            if response == 'y':
                self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
                                                 prefix='pip-')
                for path in paths:
                    new_path = self._stash(path)
                    logger.debug('Removing file or directory %s', path)
                    self._moved_paths.append(path)
                    renames(path, new_path)
                for pth in self.pth.values():
                    pth.remove()
                logger.info(
                    'Successfully uninstalled %s-%s',
                    self.dist.project_name, self.dist.version
                )

    def rollback(self):
        """Rollback the changes previously made by remove()."""
        if self.save_dir is None:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        for path in self._moved_paths:
            tmp_path = self._stash(path)
            logger.debug('Replacing %s', path)
            renames(tmp_path, path)
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        if self.save_dir is not None:
            rmtree(self.save_dir)
            self.save_dir = None
            self._moved_paths = []


class UninstallPthEntries(object):
    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        self.entries = set()
        self._saved_lines = None

    def add(self, entry):
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
        self._saved_lines = lines
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
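UninstallPathSet implements the two-phase uninstall that RequirementSet.install() relies on for rollback: remove() renames files into a temporary stash, rollback() restores them, and commit() deletes the stash. A hedged sketch (the pkg_resources lookup and the path are illustrative; pip normally builds this set from installed-files metadata inside InstallRequirement.uninstall()):

    from pip._vendor import pkg_resources
    from pip.req.req_uninstall import UninstallPathSet

    dist = pkg_resources.get_distribution('requests')
    paths = UninstallPathSet(dist)
    paths.add('/path/to/site-packages/requests/__init__.py')  # illustrative

    paths.remove(auto_confirm=True)  # files are stashed, not yet deleted
    try:
        pass  # ... install the replacement version here ...
    except Exception:
        paths.rollback()  # restore the stashed files
        raise
    else:
        paths.commit()    # delete the stash; rollback no longer possible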