Including the venv
lib/python3.8/site-packages/pip/_internal/req/__init__.py (new file, 78 lines)
@@ -0,0 +1,78 @@
from __future__ import absolute_import

import logging

from .req_install import InstallRequirement
from .req_set import RequirementSet
from .req_file import parse_requirements
from pip._internal.utils.logging import indent_log
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Any, List, Sequence

__all__ = [
    "RequirementSet", "InstallRequirement",
    "parse_requirements", "install_given_reqs",
]

logger = logging.getLogger(__name__)


def install_given_reqs(
    to_install,  # type: List[InstallRequirement]
    install_options,  # type: List[str]
    global_options=(),  # type: Sequence[str]
    *args,  # type: Any
    **kwargs  # type: Any
):
    # type: (...) -> List[InstallRequirement]
    """
    Install everything in the given list.

    (to be called after having downloaded and unpacked the packages)
    """

    if to_install:
        logger.info(
            'Installing collected packages: %s',
            ', '.join([req.name for req in to_install]),
        )

    with indent_log():
        for requirement in to_install:
            if requirement.conflicts_with:
                logger.info(
                    'Found existing installation: %s',
                    requirement.conflicts_with,
                )
                with indent_log():
                    uninstalled_pathset = requirement.uninstall(
                        auto_confirm=True
                    )
            try:
                requirement.install(
                    install_options,
                    global_options,
                    *args,
                    **kwargs
                )
            except Exception:
                should_rollback = (
                    requirement.conflicts_with and
                    not requirement.install_succeeded
                )
                # if install did not succeed, rollback previous uninstall
                if should_rollback:
                    uninstalled_pathset.rollback()
                raise
            else:
                should_commit = (
                    requirement.conflicts_with and
                    requirement.install_succeeded
                )
                if should_commit:
                    uninstalled_pathset.commit()
                requirement.remove_temporary_source()

    return to_install
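The interesting part of install_given_reqs() is the stash-then-commit-or-rollback dance around a conflicting installation. A minimal sketch of that pattern in isolation (all names here are hypothetical stand-ins, not pip API):

# Minimal sketch of the transaction pattern above; `stash_existing` and
# `install_new` are hypothetical callables, not part of pip.
def transactional_replace(stash_existing, install_new):
    uninstalled_pathset = stash_existing()   # move the old files aside
    try:
        install_new()
    except Exception:
        uninstalled_pathset.rollback()       # restore the old version
        raise
    else:
        uninstalled_pathset.commit()         # drop the stashed copies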
7 binary files not shown.
lib/python3.8/site-packages/pip/_internal/req/constructors.py (new file, 349 lines)
@@ -0,0 +1,349 @@
"""Backing implementation for InstallRequirement's various constructors

The idea here is that these formed a major chunk of InstallRequirement's
size, so moving them and their support code out of that class makes the
rest of the code easier to understand.

These are meant to be used elsewhere within pip to create instances of
InstallRequirement.
"""

import logging
import os
import re

from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
from pip._vendor.packaging.specifiers import Specifier
from pip._vendor.pkg_resources import RequirementParseError, parse_requirements

from pip._internal.download import is_archive_file, is_url, url_to_path
from pip._internal.exceptions import InstallationError
from pip._internal.models.index import PyPI, TestPyPI
from pip._internal.models.link import Link
from pip._internal.pyproject import make_pyproject_path
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.misc import is_installable_dir, path_to_url
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.vcs import vcs
from pip._internal.wheel import Wheel

if MYPY_CHECK_RUNNING:
    from typing import (
        Any, Dict, Optional, Set, Tuple, Union,
    )
    from pip._internal.cache import WheelCache


__all__ = [
    "install_req_from_editable", "install_req_from_line",
    "parse_editable"
]

logger = logging.getLogger(__name__)
operators = Specifier._operators.keys()


def _strip_extras(path):
    # type: (str) -> Tuple[str, Optional[str]]
    m = re.match(r'^(.+)(\[[^\]]+\])$', path)
    extras = None
    if m:
        path_no_extras = m.group(1)
        extras = m.group(2)
    else:
        path_no_extras = path

    return path_no_extras, extras
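_strip_extras() splits a trailing extras bracket off a path. A quick hypothetical check of its behaviour (not part of the committed file):

# Hypothetical check of _strip_extras.
assert _strip_extras('./mylib[test,docs]') == ('./mylib', '[test,docs]')
assert _strip_extras('./mylib') == ('./mylib', None)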
def parse_editable(editable_req):
    # type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]]
    """Parses an editable requirement into:
        - a requirement name
        - a URL
        - extras
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]
    """

    url = editable_req

    # If a file path is specified with extras, strip off the extras.
    url_no_extras, extras = _strip_extras(url)

    if os.path.isdir(url_no_extras):
        if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
            msg = (
                'File "setup.py" not found. Directory cannot be installed '
                'in editable mode: {}'.format(os.path.abspath(url_no_extras))
            )
            pyproject_path = make_pyproject_path(url_no_extras)
            if os.path.isfile(pyproject_path):
                msg += (
                    '\n(A "pyproject.toml" file was found, but editable '
                    'mode currently requires a setup.py based build.)'
                )
            raise InstallationError(msg)

        # Treating it as code that has already been checked out
        url_no_extras = path_to_url(url_no_extras)

    if url_no_extras.lower().startswith('file:'):
        package_name = Link(url_no_extras).egg_fragment
        if extras:
            return (
                package_name,
                url_no_extras,
                Requirement("placeholder" + extras.lower()).extras,
            )
        else:
            return package_name, url_no_extras, None

    for version_control in vcs:
        if url.lower().startswith('%s:' % version_control):
            url = '%s+%s' % (version_control, url)
            break

    if '+' not in url:
        raise InstallationError(
            '{} is not a valid editable requirement. '
            'It should either be a path to a local project or a VCS URL '
            '(beginning with svn+, git+, hg+, or bzr+).'.format(editable_req)
        )

    vc_type = url.split('+', 1)[0].lower()

    if not vcs.get_backend(vc_type):
        error_message = 'For --editable=%s only ' % editable_req + \
            ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
            ' is currently supported'
        raise InstallationError(error_message)

    package_name = Link(url).egg_fragment
    if not package_name:
        raise InstallationError(
            "Could not detect requirement name for '%s', please specify one "
            "with #egg=your_package_name" % editable_req
        )
    return package_name, url, None
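For a VCS URL with an #egg fragment, parse_editable() returns the name, the (possibly prefixed) URL, and no extras. A hypothetical example following the rules above (the URL is made up):

name, url, extras = parse_editable(
    'git+https://example.com/repo.git#egg=Foobar'
)
assert name == 'Foobar'
assert url == 'git+https://example.com/repo.git#egg=Foobar'
assert extras is None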
def deduce_helpful_msg(req):
    # type: (str) -> str
    """Returns a helpful msg in case the requirements file does not exist,
    or cannot be parsed.

    :param req: Requirements file path
    """
    msg = ""
    if os.path.exists(req):
        msg = " It does exist."
        # Try to parse and check if it is a requirements file.
        try:
            with open(req, 'r') as fp:
                # parse first line only
                next(parse_requirements(fp.read()))
                msg += " The argument you provided " + \
                    "(%s) appears to be a" % (req) + \
                    " requirements file. If that is the" + \
                    " case, use the '-r' flag to install" + \
                    " the packages specified within it."
        except RequirementParseError:
            logger.debug(
                "Cannot parse '%s' as requirements file" % (req),
                exc_info=True,
            )
    else:
        msg += " File '%s' does not exist." % (req)
    return msg
# ---- The actual constructors follow ----


def install_req_from_editable(
    editable_req,  # type: str
    comes_from=None,  # type: Optional[str]
    use_pep517=None,  # type: Optional[bool]
    isolated=False,  # type: bool
    options=None,  # type: Optional[Dict[str, Any]]
    wheel_cache=None,  # type: Optional[WheelCache]
    constraint=False  # type: bool
):
    # type: (...) -> InstallRequirement
    name, url, extras_override = parse_editable(editable_req)
    if url.startswith('file:'):
        source_dir = url_to_path(url)
    else:
        source_dir = None

    if name is not None:
        try:
            req = Requirement(name)
        except InvalidRequirement:
            raise InstallationError("Invalid requirement: '%s'" % name)
    else:
        req = None
    return InstallRequirement(
        req, comes_from, source_dir=source_dir,
        editable=True,
        link=Link(url),
        constraint=constraint,
        use_pep517=use_pep517,
        isolated=isolated,
        options=options if options else {},
        wheel_cache=wheel_cache,
        extras=extras_override or (),
    )
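A hedged usage sketch for install_req_from_editable(), assuming the pip 19.x-era internals above (the URL is made up):

req = install_req_from_editable('git+https://example.com/repo.git#egg=Foobar')
assert req.editable
assert req.name == 'Foobar'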
def install_req_from_line(
    name,  # type: str
    comes_from=None,  # type: Optional[Union[str, InstallRequirement]]
    use_pep517=None,  # type: Optional[bool]
    isolated=False,  # type: bool
    options=None,  # type: Optional[Dict[str, Any]]
    wheel_cache=None,  # type: Optional[WheelCache]
    constraint=False,  # type: bool
    line_source=None,  # type: Optional[str]
):
    # type: (...) -> InstallRequirement
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.

    :param line_source: An optional string describing where the line is from,
        for logging purposes in case of an error.
    """
    if is_url(name):
        marker_sep = '; '
    else:
        marker_sep = ';'
    if marker_sep in name:
        name, markers_as_string = name.split(marker_sep, 1)
        markers_as_string = markers_as_string.strip()
        if not markers_as_string:
            markers = None
        else:
            markers = Marker(markers_as_string)
    else:
        markers = None
    name = name.strip()
    req_as_string = None
    path = os.path.normpath(os.path.abspath(name))
    link = None
    extras_as_string = None

    if is_url(name):
        link = Link(name)
    else:
        p, extras_as_string = _strip_extras(path)
        looks_like_dir = os.path.isdir(p) and (
            os.path.sep in name or
            (os.path.altsep is not None and os.path.altsep in name) or
            name.startswith('.')
        )
        if looks_like_dir:
            if not is_installable_dir(p):
                raise InstallationError(
                    "Directory %r is not installable. Neither 'setup.py' "
                    "nor 'pyproject.toml' found." % name
                )
            link = Link(path_to_url(p))
        elif is_archive_file(p):
            if not os.path.isfile(p):
                logger.warning(
                    'Requirement %r looks like a filename, but the '
                    'file does not exist',
                    name
                )
            link = Link(path_to_url(p))

    # it's a local file, dir, or url
    if link:
        # Handle relative file URLs
        if link.scheme == 'file' and re.search(r'\.\./', link.url):
            link = Link(
                path_to_url(os.path.normpath(os.path.abspath(link.path))))
        # wheel file
        if link.is_wheel:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            req_as_string = "%s==%s" % (wheel.name, wheel.version)
        else:
            # set the req to the egg fragment. when it's not there, this
            # will become an 'unnamed' requirement
            req_as_string = link.egg_fragment

    # a requirement specifier
    else:
        req_as_string = name

    if extras_as_string:
        extras = Requirement("placeholder" + extras_as_string.lower()).extras
    else:
        extras = ()
    if req_as_string is not None:
        try:
            req = Requirement(req_as_string)
        except InvalidRequirement:
            if os.path.sep in req_as_string:
                add_msg = "It looks like a path."
                add_msg += deduce_helpful_msg(req_as_string)
            elif ('=' in req_as_string and
                    not any(op in req_as_string for op in operators)):
                add_msg = "= is not a valid operator. Did you mean == ?"
            else:
                add_msg = ''
            if line_source is None:
                source = ''
            else:
                source = ' (from {})'.format(line_source)
            msg = (
                'Invalid requirement: {!r}{}'.format(req_as_string, source)
            )
            if add_msg:
                msg += '\nHint: {}'.format(add_msg)
            raise InstallationError(msg)
    else:
        req = None

    return InstallRequirement(
        req, comes_from, link=link, markers=markers,
        use_pep517=use_pep517, isolated=isolated,
        options=options if options else {},
        wheel_cache=wheel_cache,
        constraint=constraint,
        extras=extras,
    )
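install_req_from_line() accepts plain specifiers as well as paths and URLs. A hypothetical sketch of the specifier case:

req = install_req_from_line('requests==2.21.0')
assert req.name == 'requests'
assert str(req.req.specifier) == '==2.21.0'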
def install_req_from_req_string(
    req_string,  # type: str
    comes_from=None,  # type: Optional[InstallRequirement]
    isolated=False,  # type: bool
    wheel_cache=None,  # type: Optional[WheelCache]
    use_pep517=None  # type: Optional[bool]
):
    # type: (...) -> InstallRequirement
    try:
        req = Requirement(req_string)
    except InvalidRequirement:
        raise InstallationError("Invalid requirement: '%s'" % req_string)

    domains_not_allowed = [
        PyPI.file_storage_domain,
        TestPyPI.file_storage_domain,
    ]
    if (req.url and comes_from and comes_from.link and
            comes_from.link.netloc in domains_not_allowed):
        # Explicitly disallow pypi packages that depend on external urls
        raise InstallationError(
            "Packages installed from PyPI cannot depend on packages "
            "which are not also hosted on PyPI.\n"
            "%s depends on %s " % (comes_from.name, req)
        )

    return InstallRequirement(
        req, comes_from, isolated=isolated, wheel_cache=wheel_cache,
        use_pep517=use_pep517
    )
lib/python3.8/site-packages/pip/_internal/req/req_file.py (new file, 399 lines)
@@ -0,0 +1,399 @@
"""
Requirements file parsing
"""

from __future__ import absolute_import

import optparse
import os
import re
import shlex
import sys

from pip._vendor.six.moves import filterfalse
from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal.cli import cmdoptions
from pip._internal.download import get_file_content
from pip._internal.exceptions import RequirementsFileParseError
from pip._internal.models.search_scope import SearchScope
from pip._internal.req.constructors import (
    install_req_from_editable, install_req_from_line,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (
        Any, Callable, Iterator, List, NoReturn, Optional, Text, Tuple,
    )
    from pip._internal.req import InstallRequirement
    from pip._internal.cache import WheelCache
    from pip._internal.index import PackageFinder
    from pip._internal.download import PipSession

    ReqFileLines = Iterator[Tuple[int, Text]]

__all__ = ['parse_requirements']

SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
COMMENT_RE = re.compile(r'(^|\s+)#.*$')

# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
# variable name consisting of only uppercase letters, digits or the '_'
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
# 2013 Edition.
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')

SUPPORTED_OPTIONS = [
    cmdoptions.constraints,
    cmdoptions.editable,
    cmdoptions.requirements,
    cmdoptions.no_index,
    cmdoptions.index_url,
    cmdoptions.find_links,
    cmdoptions.extra_index_url,
    cmdoptions.always_unzip,
    cmdoptions.no_binary,
    cmdoptions.only_binary,
    cmdoptions.pre,
    cmdoptions.trusted_host,
    cmdoptions.require_hashes,
]  # type: List[Callable[..., optparse.Option]]

# options to be passed to requirements
SUPPORTED_OPTIONS_REQ = [
    cmdoptions.install_options,
    cmdoptions.global_options,
    cmdoptions.hash,
]  # type: List[Callable[..., optparse.Option]]

# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
def parse_requirements(
    filename,  # type: str
    finder=None,  # type: Optional[PackageFinder]
    comes_from=None,  # type: Optional[str]
    options=None,  # type: Optional[optparse.Values]
    session=None,  # type: Optional[PipSession]
    constraint=False,  # type: bool
    wheel_cache=None,  # type: Optional[WheelCache]
    use_pep517=None  # type: Optional[bool]
):
    # type: (...) -> Iterator[InstallRequirement]
    """Parse a requirements file and yield InstallRequirement instances.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: cli options.
    :param session: Instance of pip.download.PipSession.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    :param use_pep517: Value of the --use-pep517 option.
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )

    lines_enum = preprocess(content, options)

    for line_number, line in lines_enum:
        req_iter = process_line(line, filename, line_number, finder,
                                comes_from, options, session, wheel_cache,
                                use_pep517=use_pep517, constraint=constraint)
        for req in req_iter:
            yield req
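A hedged usage sketch, assuming the internal API shown here ('requirements.txt' stands in for any local file; note that session is mandatory):

from pip._internal.download import PipSession

for req in parse_requirements('requirements.txt', session=PipSession()):
    print(req.name, req.req)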
def preprocess(content, options):
    # type: (Text, Optional[optparse.Values]) -> ReqFileLines
    """Split, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    :param options: cli options
    """
    lines_enum = enumerate(content.splitlines(), start=1)  # type: ReqFileLines
    lines_enum = join_lines(lines_enum)
    lines_enum = ignore_comments(lines_enum)
    lines_enum = skip_regex(lines_enum, options)
    lines_enum = expand_env_variables(lines_enum)
    return lines_enum
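A hypothetical run of the preprocessing pipeline: the comment is dropped and the backslash continuation is joined, keeping the first line's number:

content = (
    "# pinned deps\n"
    "requests \\\n"
    "    ==2.21.0\n"
)
print(list(preprocess(content, options=None)))
# -> [(2, 'requests     ==2.21.0')]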
def process_line(
    line,  # type: Text
    filename,  # type: str
    line_number,  # type: int
    finder=None,  # type: Optional[PackageFinder]
    comes_from=None,  # type: Optional[str]
    options=None,  # type: Optional[optparse.Values]
    session=None,  # type: Optional[PipSession]
    wheel_cache=None,  # type: Optional[WheelCache]
    use_pep517=None,  # type: Optional[bool]
    constraint=False,  # type: bool
):
    # type: (...) -> Iterator[InstallRequirement]
    """Process a single requirements line; this can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.

    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    """
    parser = build_parser(line)
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        defaults.format_control = finder.format_control
    args_str, options_str = break_args_options(line)
    # Prior to 2.7.3, shlex cannot deal with unicode entries
    if sys.version_info < (2, 7, 3):
        # https://github.com/python/mypy/issues/1174
        options_str = options_str.encode('utf8')  # type: ignore
    # https://github.com/python/mypy/issues/1174
    opts, _ = parser.parse_args(
        shlex.split(options_str), defaults)  # type: ignore

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number,
    )

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        line_source = 'line {} of {}'.format(line_number, filename)
        yield install_req_from_line(
            args_str,
            comes_from=line_comes_from,
            use_pep517=use_pep517,
            isolated=isolated,
            options=req_options,
            wheel_cache=wheel_cache,
            constraint=constraint,
            line_source=line_source,
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        yield install_req_from_editable(
            opts.editables[0], comes_from=line_comes_from,
            use_pep517=use_pep517,
            constraint=constraint, isolated=isolated, wheel_cache=wheel_cache
        )

    # parse a nested requirements file
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parsed_reqs = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parsed_reqs:
            yield req

    # percolate hash-checking option upward
    elif opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options
    elif finder:
        find_links = finder.find_links
        index_urls = finder.index_urls
        if opts.index_url:
            index_urls = [opts.index_url]
        if opts.no_index is True:
            index_urls = []
        if opts.extra_index_urls:
            index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            find_links.append(value)

        search_scope = SearchScope(
            find_links=find_links,
            index_urls=index_urls,
        )
        finder.search_scope = search_scope

        if opts.pre:
            finder.set_allow_all_prereleases()
        for host in opts.trusted_hosts or []:
            source = 'line {} of {}'.format(line_number, filename)
            finder.add_trusted_host(host, source=source)
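For context, a hypothetical pair of requirements-file lines exercising the two paths above: the first only updates the finder, the second yields one InstallRequirement with requirement-scoped options:

# Hypothetical requirements-file content, shown here as a Python string.
example_lines = """\
--extra-index-url https://example.com/simple
SomeProject==1.3 --install-option="--prefix=/opt"
"""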
def break_args_options(line):
    # type: (Text) -> Tuple[str, Text]
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        if token.startswith('-') or token.startswith('--'):
            break
        else:
            args.append(token)
            options.pop(0)
    return ' '.join(args), ' '.join(options)  # type: ignore
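A quick hypothetical check of break_args_options(): only the option tail is later shlex-ed and optparse-d:

args, options = break_args_options('SomeProject==1.3 --hash=sha256:abcd')
assert args == 'SomeProject==1.3'
assert options == '--hash=sha256:abcd'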
def build_parser(line):
    # type: (Text) -> optparse.OptionParser
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)

    option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
    for option_factory in option_factories:
        option = option_factory()
        parser.add_option(option)

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self, msg):
        # type: (Any, str) -> NoReturn
        # add offending line
        msg = 'Invalid requirement: %s\n%s' % (line, msg)
        raise RequirementsFileParseError(msg)
    # NOTE: mypy disallows assigning to a method
    # https://github.com/python/mypy/issues/2427
    parser.exit = parser_exit  # type: ignore

    return parser
def join_lines(lines_enum):
    # type: (ReqFileLines) -> ReqFileLines
    """Joins a line ending in a backslash with the following line (except
    when the following line is a comment). The joined line takes on the
    index of the first line.
    """
    primary_line_number = None
    new_line = []  # type: List[Text]
    for line_number, line in lines_enum:
        if not line.endswith('\\') or COMMENT_RE.match(line):
            if COMMENT_RE.match(line):
                # this ensures comments are always matched later
                line = ' ' + line
            if new_line:
                new_line.append(line)
                yield primary_line_number, ''.join(new_line)
                new_line = []
            else:
                yield line_number, line
        else:
            if not new_line:
                primary_line_number = line_number
            new_line.append(line.strip('\\'))

    # last line contains \
    if new_line:
        yield primary_line_number, ''.join(new_line)

    # TODO: handle space after '\'.
def ignore_comments(lines_enum):
    # type: (ReqFileLines) -> ReqFileLines
    """
    Strips comments and filters empty lines.
    """
    for line_number, line in lines_enum:
        line = COMMENT_RE.sub('', line)
        line = line.strip()
        if line:
            yield line_number, line
def skip_regex(lines_enum, options):
    # type: (ReqFileLines, Optional[optparse.Values]) -> ReqFileLines
    """
    Skip lines that match the '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    """
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        pattern = re.compile(skip_regex)
        lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum)
    return lines_enum
def expand_env_variables(lines_enum):
    # type: (ReqFileLines) -> ReqFileLines
    """Replace all environment variables that can be retrieved via `os.getenv`.

    The only allowed format for environment variables defined in the
    requirement file is `${MY_VARIABLE_1}` to ensure two things:

    1. Strings that contain a `$` aren't accidentally (partially) expanded.
    2. Ensure consistency across platforms for requirement files.

    These points are the result of a discussion on the `github pull
    request #3514 <https://github.com/pypa/pip/pull/3514>`_.

    Valid characters in variable names follow the `POSIX standard
    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
    to uppercase letters, digits and the `_` (underscore).
    """
    for line_number, line in lines_enum:
        for env_var, var_name in ENV_VAR_RE.findall(line):
            value = os.getenv(var_name)
            if not value:
                continue

            line = line.replace(env_var, value)

        yield line_number, line
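A hypothetical expansion following the ${VAR} rule above:

import os

os.environ['API_TOKEN'] = 'secret'
lines = [(1, 'https://${API_TOKEN}@example.com/simple/pkg-1.0.tar.gz')]
print(list(expand_env_variables(iter(lines))))
# -> [(1, 'https://secret@example.com/simple/pkg-1.0.tar.gz')]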
lib/python3.8/site-packages/pip/_internal/req/req_install.py (new file, 1035 lines)
File diff suppressed because it is too large
lib/python3.8/site-packages/pip/_internal/req/req_set.py (new file, 193 lines)
@@ -0,0 +1,193 @@
from __future__ import absolute_import

import logging
from collections import OrderedDict

from pip._internal.exceptions import InstallationError
from pip._internal.utils.logging import indent_log
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.wheel import Wheel

if MYPY_CHECK_RUNNING:
    from typing import Dict, Iterable, List, Optional, Tuple
    from pip._internal.req.req_install import InstallRequirement


logger = logging.getLogger(__name__)
class RequirementSet(object):

    def __init__(self, require_hashes=False, check_supported_wheels=True):
        # type: (bool, bool) -> None
        """Create a RequirementSet.
        """

        self.requirements = OrderedDict()  # type: Dict[str, InstallRequirement]  # noqa: E501
        self.require_hashes = require_hashes
        self.check_supported_wheels = check_supported_wheels

        # Mapping of alias: real_name
        self.requirement_aliases = {}  # type: Dict[str, str]
        self.unnamed_requirements = []  # type: List[InstallRequirement]
        self.successfully_downloaded = []  # type: List[InstallRequirement]
        self.reqs_to_cleanup = []  # type: List[InstallRequirement]

    def __str__(self):
        # type: () -> str
        reqs = [req for req in self.requirements.values()
                if not req.comes_from]
        reqs.sort(key=lambda req: req.name.lower())
        return ' '.join([str(req.req) for req in reqs])

    def __repr__(self):
        # type: () -> str
        reqs = [req for req in self.requirements.values()]
        reqs.sort(key=lambda req: req.name.lower())
        reqs_str = ', '.join([str(req.req) for req in reqs])
        return ('<%s object; %d requirement(s): %s>'
                % (self.__class__.__name__, len(reqs), reqs_str))

    def add_requirement(
        self,
        install_req,  # type: InstallRequirement
        parent_req_name=None,  # type: Optional[str]
        extras_requested=None  # type: Optional[Iterable[str]]
    ):
        # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]  # noqa: E501
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        name = install_req.name

        # If the markers do not match, ignore this requirement.
        if not install_req.match_markers(extras_requested):
            logger.info(
                "Ignoring %s: markers '%s' don't match your environment",
                name, install_req.markers,
            )
            return [], None

        # If the wheel is not supported, raise an error.
        # Should check this after filtering out based on environment markers to
        # allow specifying different wheels based on the environment/OS, in a
        # single requirements file.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            if self.check_supported_wheels and not wheel.supported():
                raise InstallationError(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )

        # This next bit is really a sanity check.
        assert install_req.is_direct == (parent_req_name is None), (
            "a direct req shouldn't have a parent and also, "
            "a non direct req should have a parent"
        )

        # Unnamed requirements are scanned again and the requirement won't be
        # added as a dependency until after scanning.
        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req], None

        try:
            existing_req = self.get_requirement(name)
        except KeyError:
            existing_req = None

        has_conflicting_requirement = (
            parent_req_name is None and
            existing_req and
            not existing_req.constraint and
            existing_req.extras == install_req.extras and
            existing_req.req.specifier != install_req.req.specifier
        )
        if has_conflicting_requirement:
            raise InstallationError(
                "Double requirement given: %s (already in %s, name=%r)"
                % (install_req, existing_req, name)
            )

        # When no existing requirement exists, add the requirement as a
        # dependency and it will be scanned again after.
        if not existing_req:
            self.requirements[name] = install_req
            # FIXME: what about other normalizations? E.g., _ vs. -?
            if name.lower() != name:
                self.requirement_aliases[name.lower()] = name
            # We'd want to rescan this requirement later
            return [install_req], install_req

        # Assume there's no need to scan, and that we've already
        # encountered this for scanning.
        if install_req.constraint or not existing_req.constraint:
            return [], existing_req

        does_not_satisfy_constraint = (
            install_req.link and
            not (
                existing_req.link and
                install_req.link.path == existing_req.link.path
            )
        )
        if does_not_satisfy_constraint:
            self.reqs_to_cleanup.append(install_req)
            raise InstallationError(
                "Could not satisfy constraints for '%s': "
                "installation from path or url cannot be "
                "constrained to a version" % name,
            )
        # If we're now installing a constraint, mark the existing
        # object for real installation.
        existing_req.constraint = False
        existing_req.extras = tuple(sorted(
            set(existing_req.extras) | set(install_req.extras)
        ))
        logger.debug(
            "Setting %s extras to: %s",
            existing_req, existing_req.extras,
        )
        # Return the existing requirement for addition to the parent and
        # scanning again.
        return [existing_req], existing_req
    def has_requirement(self, project_name):
        # type: (str) -> bool
        name = project_name.lower()
        if (name in self.requirements and
                not self.requirements[name].constraint or
                name in self.requirement_aliases and
                not self.requirements[self.requirement_aliases[name]].constraint):
            return True
        return False

    def get_requirement(self, project_name):
        # type: (str) -> InstallRequirement
        for name in project_name, project_name.lower():
            if name in self.requirements:
                return self.requirements[name]
            if name in self.requirement_aliases:
                return self.requirements[self.requirement_aliases[name]]
        raise KeyError("No project with the name %r" % project_name)

    def cleanup_files(self):
        # type: () -> None
        """Clean up files, remove builds."""
        logger.debug('Cleaning up...')
        with indent_log():
            for req in self.reqs_to_cleanup:
                req.remove_temporary_source()
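A hedged sketch tying RequirementSet to the constructors above (pip's resolver normally sets is_direct for user-supplied requirements; here it is set by hand):

from pip._internal.req.constructors import install_req_from_line

req_set = RequirementSet()
install_req = install_req_from_line('requests==2.21.0')
install_req.is_direct = True   # user-supplied: no parent requirement
req_set.add_requirement(install_req)
assert req_set.has_requirement('requests')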
lib/python3.8/site-packages/pip/_internal/req/req_tracker.py (new file, 96 lines)
@@ -0,0 +1,96 @@
from __future__ import absolute_import

import contextlib
import errno
import hashlib
import logging
import os

from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from types import TracebackType
    from typing import Iterator, Optional, Set, Type
    from pip._internal.req.req_install import InstallRequirement
    from pip._internal.models.link import Link

logger = logging.getLogger(__name__)


class RequirementTracker(object):

    def __init__(self):
        # type: () -> None
        self._root = os.environ.get('PIP_REQ_TRACKER')
        if self._root is None:
            self._temp_dir = TempDirectory(delete=False, kind='req-tracker')
            self._temp_dir.create()
            self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path
            logger.debug('Created requirements tracker %r', self._root)
        else:
            self._temp_dir = None
            logger.debug('Re-using requirements tracker %r', self._root)
        self._entries = set()  # type: Set[InstallRequirement]

    def __enter__(self):
        # type: () -> RequirementTracker
        return self

    def __exit__(
        self,
        exc_type,  # type: Optional[Type[BaseException]]
        exc_val,  # type: Optional[BaseException]
        exc_tb  # type: Optional[TracebackType]
    ):
        # type: (...) -> None
        self.cleanup()

    def _entry_path(self, link):
        # type: (Link) -> str
        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
        return os.path.join(self._root, hashed)

    def add(self, req):
        # type: (InstallRequirement) -> None
        link = req.link
        info = str(req)
        entry_path = self._entry_path(link)
        try:
            with open(entry_path) as fp:
                # Error, there's already a build in progress.
                raise LookupError('%s is already being built: %s'
                                  % (link, fp.read()))
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
        assert req not in self._entries
        with open(entry_path, 'w') as fp:
            fp.write(info)
        self._entries.add(req)
        logger.debug('Added %s to build tracker %r', req, self._root)

    def remove(self, req):
        # type: (InstallRequirement) -> None
        link = req.link
        self._entries.remove(req)
        os.unlink(self._entry_path(link))
        logger.debug('Removed %s from build tracker %r', req, self._root)

    def cleanup(self):
        # type: () -> None
        for req in set(self._entries):
            self.remove(req)
        remove = self._temp_dir is not None
        if remove:
            self._temp_dir.cleanup()
        logger.debug('%s build tracker %r',
                     'Removed' if remove else 'Cleaned',
                     self._root)

    @contextlib.contextmanager
    def track(self, req):
        # type: (InstallRequirement) -> Iterator[None]
        self.add(req)
        yield
        self.remove(req)
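A hedged usage sketch: the tracker records, per link, that a build is in progress (under $PIP_REQ_TRACKER) and raises LookupError if the same link is tracked twice (the URL is made up):

from pip._internal.req.constructors import install_req_from_line

install_req = install_req_from_line('https://example.com/pkg-1.0.tar.gz')
with RequirementTracker() as tracker:
    with tracker.track(install_req):
        pass  # the build/install steps would run here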
lib/python3.8/site-packages/pip/_internal/req/req_uninstall.py (new file, 633 lines)
@@ -0,0 +1,633 @@
from __future__ import absolute_import

import csv
import functools
import logging
import os
import sys
import sysconfig

from pip._vendor import pkg_resources

from pip._internal.exceptions import UninstallationError
from pip._internal.locations import bin_py, bin_user
from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local,
    normalize_path, renames, rmtree,
)
from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (
        Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple,
    )
    from pip._vendor.pkg_resources import Distribution

logger = logging.getLogger(__name__)


def _script_names(dist, script_name, is_gui):
    # type: (Distribution, str, bool) -> List[str]
    """Create the fully qualified name of the files created by
    {console,gui}_scripts for the given ``dist``.
    Returns the list of file names
    """
    if dist_in_usersite(dist):
        bin_dir = bin_user
    else:
        bin_dir = bin_py
    exe_name = os.path.join(bin_dir, script_name)
    paths_to_remove = [exe_name]
    if WINDOWS:
        paths_to_remove.append(exe_name + '.exe')
        paths_to_remove.append(exe_name + '.exe.manifest')
        if is_gui:
            paths_to_remove.append(exe_name + '-script.pyw')
        else:
            paths_to_remove.append(exe_name + '-script.py')
    return paths_to_remove
def _unique(fn):
    # type: (Callable) -> Callable[..., Iterator[Any]]
    @functools.wraps(fn)
    def unique(*args, **kw):
        # type: (Any, Any) -> Iterator[Any]
        seen = set()  # type: Set[Any]
        for item in fn(*args, **kw):
            if item not in seen:
                seen.add(item)
                yield item
    return unique


@_unique
def uninstallation_paths(dist):
    # type: (Distribution) -> Iterator[str]
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.py[co]

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc and .pyo in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .py[co].
    """
    r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in r:
        path = os.path.join(dist.location, row[0])
        yield path
        if path.endswith('.py'):
            dn, fn = os.path.split(path)
            base = fn[:-3]
            path = os.path.join(dn, base + '.pyc')
            yield path
            path = os.path.join(dn, base + '.pyo')
            yield path
def compact(paths):
    # type: (Iterable[str]) -> Set[str]
    """Compact a path set to contain the minimal number of paths
    necessary to contain all paths in the set. If /a/path/ and
    /a/path/to/a/file.txt are both in the set, leave only the
    shorter path."""

    sep = os.path.sep
    short_paths = set()  # type: Set[str]
    for path in sorted(paths, key=len):
        should_skip = any(
            path.startswith(shortpath.rstrip("*")) and
            path[len(shortpath.rstrip("*").rstrip(sep))] == sep
            for shortpath in short_paths
        )
        if not should_skip:
            short_paths.add(path)
    return short_paths
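A quick hypothetical check of compact() with POSIX paths:

paths = {'/a/path/', '/a/path/to/a/file.txt', '/a/other.txt'}
assert compact(paths) == {'/a/path/', '/a/other.txt'}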
def compress_for_rename(paths):
    # type: (Iterable[str]) -> Set[str]
    """Returns a set containing the paths that need to be renamed.

    This set may include directories when the original sequence of paths
    included every file on disk.
    """
    case_map = dict((os.path.normcase(p), p) for p in paths)
    remaining = set(case_map)
    unchecked = sorted(set(os.path.split(p)[0]
                           for p in case_map.values()), key=len)
    wildcards = set()  # type: Set[str]

    def norm_join(*a):
        # type: (str) -> str
        return os.path.normcase(os.path.join(*a))

    for root in unchecked:
        if any(os.path.normcase(root).startswith(w)
               for w in wildcards):
            # This directory has already been handled.
            continue

        all_files = set()  # type: Set[str]
        all_subdirs = set()  # type: Set[str]
        for dirname, subdirs, files in os.walk(root):
            all_subdirs.update(norm_join(root, dirname, d)
                               for d in subdirs)
            all_files.update(norm_join(root, dirname, f)
                             for f in files)
        # If all the files we found are in our remaining set of files to
        # remove, then remove them from the latter set and add a wildcard
        # for the directory.
        if not (all_files - remaining):
            remaining.difference_update(all_files)
            wildcards.add(root + os.sep)

    return set(map(case_map.__getitem__, remaining)) | wildcards
def compress_for_output_listing(paths):
    # type: (Iterable[str]) -> Tuple[Set[str], Set[str]]
    """Returns a tuple of 2 sets indicating which paths to display to the user.

    The first set contains paths that would be deleted. Files of a package
    are not added and the top-level directory of the package has a '*' added
    at the end, to signify that all its contents are removed.

    The second set contains files that would have been skipped in the above
    folders.
    """

    will_remove = set(paths)
    will_skip = set()

    # Determine folders and files
    folders = set()
    files = set()
    for path in will_remove:
        if path.endswith(".pyc"):
            continue
        if path.endswith("__init__.py") or ".dist-info" in path:
            folders.add(os.path.dirname(path))
        files.add(path)

    # probably this one https://github.com/python/mypy/issues/390
    _normcased_files = set(map(os.path.normcase, files))  # type: ignore

    folders = compact(folders)

    # This walks the tree using os.walk to not miss extra folders
    # that might get added.
    for folder in folders:
        for dirpath, _, dirfiles in os.walk(folder):
            for fname in dirfiles:
                if fname.endswith(".pyc"):
                    continue

                file_ = os.path.join(dirpath, fname)
                if (os.path.isfile(file_) and
                        os.path.normcase(file_) not in _normcased_files):
                    # We are skipping this file. Add it to the set.
                    will_skip.add(file_)

    will_remove = files | {
        os.path.join(folder, "*") for folder in folders
    }

    return will_remove, will_skip
class StashedUninstallPathSet(object):
    """A set of file rename operations to stash files while
    tentatively uninstalling them."""
    def __init__(self):
        # type: () -> None
        # Mapping from source file root to [Adjacent]TempDirectory
        # for files under that directory.
        self._save_dirs = {}  # type: Dict[str, TempDirectory]
        # (old path, new path) tuples for each move that may need
        # to be undone.
        self._moves = []  # type: List[Tuple[str, str]]

    def _get_directory_stash(self, path):
        # type: (str) -> str
        """Stashes a directory.

        Directories are stashed adjacent to their original location if
        possible, or else moved/copied into the user's temp dir."""

        try:
            save_dir = AdjacentTempDirectory(path)  # type: TempDirectory
            save_dir.create()
        except OSError:
            save_dir = TempDirectory(kind="uninstall")
            save_dir.create()
        self._save_dirs[os.path.normcase(path)] = save_dir

        return save_dir.path

    def _get_file_stash(self, path):
        # type: (str) -> str
        """Stashes a file.

        If no root has been provided, one will be created for the directory
        in the user's temp directory."""
        path = os.path.normcase(path)
        head, old_head = os.path.dirname(path), None
        save_dir = None

        while head != old_head:
            try:
                save_dir = self._save_dirs[head]
                break
            except KeyError:
                pass
            head, old_head = os.path.dirname(head), head
        else:
            # Did not find any suitable root
            head = os.path.dirname(path)
            save_dir = TempDirectory(kind='uninstall')
            save_dir.create()
            self._save_dirs[head] = save_dir

        relpath = os.path.relpath(path, head)
        if relpath and relpath != os.path.curdir:
            return os.path.join(save_dir.path, relpath)
        return save_dir.path

    def stash(self, path):
        # type: (str) -> str
        """Stashes the directory or file and returns its new location.
        """
        if os.path.isdir(path):
            new_path = self._get_directory_stash(path)
        else:
            new_path = self._get_file_stash(path)

        self._moves.append((path, new_path))
        if os.path.isdir(path) and os.path.isdir(new_path):
            # If we're moving a directory, we need to
            # remove the destination first or else it will be
            # moved to inside the existing directory.
            # We just created new_path ourselves, so it will
            # be removable.
            os.rmdir(new_path)
        renames(path, new_path)
        return new_path

    def commit(self):
        # type: () -> None
        """Commits the uninstall by removing stashed files."""
        for _, save_dir in self._save_dirs.items():
            save_dir.cleanup()
        self._moves = []
        self._save_dirs = {}

    def rollback(self):
        # type: () -> None
        """Undoes the uninstall by moving stashed files back."""
        for p in self._moves:
            logger.info("Moving to %s\n from %s", *p)

        for new_path, path in self._moves:
            try:
                logger.debug('Replacing %s from %s', new_path, path)
                if os.path.isfile(new_path):
                    os.unlink(new_path)
                elif os.path.isdir(new_path):
                    rmtree(new_path)
                renames(path, new_path)
            except OSError as ex:
                logger.error("Failed to restore %s", new_path)
                logger.debug("Exception: %s", ex)

        self.commit()

    @property
    def can_rollback(self):
        # type: () -> bool
        return bool(self._moves)
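A hedged, self-contained sketch of the stash lifecycle (using a throwaway temp file; rollback restores it, commit would discard the stashed copy):

import os
import tempfile

workdir = tempfile.mkdtemp()
target = os.path.join(workdir, 'example.txt')
with open(target, 'w') as f:
    f.write('data')

stashed = StashedUninstallPathSet()
stashed.stash(target)             # moved into a temp stash dir
assert not os.path.exists(target)
stashed.rollback()                # failure path: put it back
assert os.path.exists(target)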
class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        # type: (Distribution) -> None
        self.paths = set()  # type: Set[str]
        self._refuse = set()  # type: Set[str]
        self.pth = {}  # type: Dict[str, UninstallPthEntries]
        self.dist = dist
        self._moved_paths = StashedUninstallPathSet()

    def _permitted(self, path):
        # type: (str) -> bool
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        # type: (str) -> None
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        # type: (str, str) -> None
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)
    def remove(self, auto_confirm=False, verbose=False):
        # type: (bool, bool) -> None
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""

        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return

        dist_name_version = (
            self.dist.project_name + "-" + self.dist.version
        )
        logger.info('Uninstalling %s:', dist_name_version)

        with indent_log():
            if auto_confirm or self._allowed_to_proceed(verbose):
                moved = self._moved_paths

                for_rename = compress_for_rename(self.paths)

                for path in sorted(compact(for_rename)):
                    moved.stash(path)
                    logger.debug('Removing file or directory %s', path)

                for pth in self.pth.values():
                    pth.remove()

                logger.info('Successfully uninstalled %s', dist_name_version)

    def _allowed_to_proceed(self, verbose):
        # type: (bool) -> bool
        """Display which files would be deleted and prompt for confirmation
        """

        def _display(msg, paths):
            # type: (str, Iterable[str]) -> None
            if not paths:
                return

            logger.info(msg)
            with indent_log():
                for path in sorted(compact(paths)):
                    logger.info(path)

        if not verbose:
            will_remove, will_skip = compress_for_output_listing(self.paths)
        else:
            # In verbose mode, display all the files that are going to be
            # deleted.
            will_remove = set(self.paths)
            will_skip = set()

        _display('Would remove:', will_remove)
        _display('Would not remove (might be manually added):', will_skip)
        _display('Would not remove (outside of prefix):', self._refuse)
        if verbose:
            _display('Will actually move:', compress_for_rename(self.paths))

        return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'

    def rollback(self):
        # type: () -> None
        """Rollback the changes previously made by remove()."""
        if not self._moved_paths.can_rollback:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        self._moved_paths.rollback()
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        # type: () -> None
        """Remove temporary save dir: rollback will no longer be possible."""
        self._moved_paths.commit()
    @classmethod
    def from_dist(cls, dist):
        # type: (Distribution) -> UninstallPathSet
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            return cls(dist)

        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
                                     sysconfig.get_path("platstdlib")}
                         if p}:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # The order of the uninstall cases matters: given two installs of the
        # same package, pip needs to uninstall the currently detected version.
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
            # FIXME: need a test for this elif block
            # occurs with --single-version-externally-managed/--record outside
            # of pip
            elif dist.has_metadata('top_level.txt'):
                if dist.has_metadata('namespace_packages.txt'):
                    namespaces = dist.get_metadata('namespace_packages.txt')
                else:
                    namespaces = []
                for top_level_pkg in [
                        p for p
                        in dist.get_metadata('top_level.txt').splitlines()
                        if p and p not in namespaces]:
                    path = os.path.join(dist.location, top_level_pkg)
                    paths_to_remove.add(path)
                    paths_to_remove.add(path + '.py')
                    paths_to_remove.add(path + '.pyc')
                    paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            raise UninstallationError(
                "Cannot uninstall {!r}. It is a distutils installed project "
                "and thus we cannot accurately determine which files belong "
                "to it which would lead to only a partial uninstall.".format(
                    dist.project_name,
                )
            )

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, dist.project_name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location,
            )

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        _scripts_to_remove = []
        console_scripts = dist.get_entry_map(group='console_scripts')
        for name in console_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, False))
        # find gui_scripts
        gui_scripts = dist.get_entry_map(group='gui_scripts')
        for name in gui_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, True))

        for s in _scripts_to_remove:
            paths_to_remove.add(s)

        return paths_to_remove
class UninstallPthEntries(object):
    def __init__(self, pth_file):
        # type: (str) -> None
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        self.entries = set()  # type: Set[str]
        self._saved_lines = None  # type: Optional[List[bytes]]

    def add(self, entry):
        # type: (str) -> None
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        # type: () -> None
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        # handle missing trailing newline
        if lines and not lines[-1].endswith(endline.encode("utf-8")):
            lines[-1] = lines[-1] + endline.encode("utf-8")
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        # type: () -> bool
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
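A hedged end-to-end sketch of how this machinery is driven (destructive if actually run; 'requests' stands in for any installed distribution):

from pip._vendor import pkg_resources

dist = pkg_resources.get_distribution('requests')
uninstall_set = UninstallPathSet.from_dist(dist)
uninstall_set.remove(auto_confirm=True)
uninstall_set.commit()      # or uninstall_set.rollback() on failure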