Removed the Requirement to Install Python and NodeJS (Now Bundled with Borealis)
This commit is contained in:
18
Dependencies/Python/Lib/site-packages/pip/_internal/__init__.py
vendored
Normal file
18
Dependencies/Python/Lib/site-packages/pip/_internal/__init__.py
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
from typing import List, Optional
|
||||
|
||||
from pip._internal.utils import _log
|
||||
|
||||
# init_logging() must be called before any call to logging.getLogger()
|
||||
# which happens at import of most modules.
|
||||
_log.init_logging()
|
||||
|
||||
|
||||
def main(args: Optional[List[str]] = None) -> int:
|
||||
"""This is preserved for old console scripts that may still be referencing
|
||||
it.
|
||||
|
||||
For additional details, see https://github.com/pypa/pip/issues/7498.
|
||||
"""
|
||||
from pip._internal.utils.entrypoints import _wrapper
|
||||
|
||||
return _wrapper(args)
|
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/__init__.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/__init__.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/build_env.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/build_env.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/cache.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/cache.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/configuration.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/configuration.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/exceptions.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/exceptions.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/main.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/main.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/pyproject.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/pyproject.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-313.pyc
vendored
Normal file
Binary file not shown.
322
Dependencies/Python/Lib/site-packages/pip/_internal/build_env.py
vendored
Normal file
322
Dependencies/Python/Lib/site-packages/pip/_internal/build_env.py
vendored
Normal file
@ -0,0 +1,322 @@
|
||||
"""Build Environment used for isolation during sdist building
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import site
|
||||
import sys
|
||||
import textwrap
|
||||
from collections import OrderedDict
|
||||
from types import TracebackType
|
||||
from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union
|
||||
|
||||
from pip._vendor.packaging.version import Version
|
||||
|
||||
from pip import __file__ as pip_location
|
||||
from pip._internal.cli.spinners import open_spinner
|
||||
from pip._internal.locations import get_platlib, get_purelib, get_scheme
|
||||
from pip._internal.metadata import get_default_environment, get_environment
|
||||
from pip._internal.utils.logging import VERBOSE
|
||||
from pip._internal.utils.packaging import get_requirement
|
||||
from pip._internal.utils.subprocess import call_subprocess
|
||||
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
|
||||
return (a, b) if a != b else (a,)
|
||||
|
||||
|
||||
class _Prefix:
|
||||
def __init__(self, path: str) -> None:
|
||||
self.path = path
|
||||
self.setup = False
|
||||
scheme = get_scheme("", prefix=path)
|
||||
self.bin_dir = scheme.scripts
|
||||
self.lib_dirs = _dedup(scheme.purelib, scheme.platlib)
|
||||
|
||||
|
||||
def get_runnable_pip() -> str:
|
||||
"""Get a file to pass to a Python executable, to run the currently-running pip.
|
||||
|
||||
This is used to run a pip subprocess, for installing requirements into the build
|
||||
environment.
|
||||
"""
|
||||
source = pathlib.Path(pip_location).resolve().parent
|
||||
|
||||
if not source.is_dir():
|
||||
# This would happen if someone is using pip from inside a zip file. In that
|
||||
# case, we can use that directly.
|
||||
return str(source)
|
||||
|
||||
return os.fsdecode(source / "__pip-runner__.py")
|
||||
|
||||
|
||||
def _get_system_sitepackages() -> Set[str]:
|
||||
"""Get system site packages
|
||||
|
||||
Usually from site.getsitepackages,
|
||||
but fallback on `get_purelib()/get_platlib()` if unavailable
|
||||
(e.g. in a virtualenv created by virtualenv<20)
|
||||
|
||||
Returns normalized set of strings.
|
||||
"""
|
||||
if hasattr(site, "getsitepackages"):
|
||||
system_sites = site.getsitepackages()
|
||||
else:
|
||||
# virtualenv < 20 overwrites site.py without getsitepackages
|
||||
# fallback on get_purelib/get_platlib.
|
||||
# this is known to miss things, but shouldn't in the cases
|
||||
# where getsitepackages() has been removed (inside a virtualenv)
|
||||
system_sites = [get_purelib(), get_platlib()]
|
||||
return {os.path.normcase(path) for path in system_sites}
|
||||
|
||||
|
||||
class BuildEnvironment:
|
||||
"""Creates and manages an isolated environment to install build deps"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
|
||||
|
||||
self._prefixes = OrderedDict(
|
||||
(name, _Prefix(os.path.join(temp_dir.path, name)))
|
||||
for name in ("normal", "overlay")
|
||||
)
|
||||
|
||||
self._bin_dirs: List[str] = []
|
||||
self._lib_dirs: List[str] = []
|
||||
for prefix in reversed(list(self._prefixes.values())):
|
||||
self._bin_dirs.append(prefix.bin_dir)
|
||||
self._lib_dirs.extend(prefix.lib_dirs)
|
||||
|
||||
# Customize site to:
|
||||
# - ensure .pth files are honored
|
||||
# - prevent access to system site packages
|
||||
system_sites = _get_system_sitepackages()
|
||||
|
||||
self._site_dir = os.path.join(temp_dir.path, "site")
|
||||
if not os.path.exists(self._site_dir):
|
||||
os.mkdir(self._site_dir)
|
||||
with open(
|
||||
os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
|
||||
) as fp:
|
||||
fp.write(
|
||||
textwrap.dedent(
|
||||
"""
|
||||
import os, site, sys
|
||||
|
||||
# First, drop system-sites related paths.
|
||||
original_sys_path = sys.path[:]
|
||||
known_paths = set()
|
||||
for path in {system_sites!r}:
|
||||
site.addsitedir(path, known_paths=known_paths)
|
||||
system_paths = set(
|
||||
os.path.normcase(path)
|
||||
for path in sys.path[len(original_sys_path):]
|
||||
)
|
||||
original_sys_path = [
|
||||
path for path in original_sys_path
|
||||
if os.path.normcase(path) not in system_paths
|
||||
]
|
||||
sys.path = original_sys_path
|
||||
|
||||
# Second, add lib directories.
|
||||
# ensuring .pth file are processed.
|
||||
for path in {lib_dirs!r}:
|
||||
assert not path in sys.path
|
||||
site.addsitedir(path)
|
||||
"""
|
||||
).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
|
||||
)
|
||||
|
||||
def __enter__(self) -> None:
|
||||
self._save_env = {
|
||||
name: os.environ.get(name, None)
|
||||
for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
|
||||
}
|
||||
|
||||
path = self._bin_dirs[:]
|
||||
old_path = self._save_env["PATH"]
|
||||
if old_path:
|
||||
path.extend(old_path.split(os.pathsep))
|
||||
|
||||
pythonpath = [self._site_dir]
|
||||
|
||||
os.environ.update(
|
||||
{
|
||||
"PATH": os.pathsep.join(path),
|
||||
"PYTHONNOUSERSITE": "1",
|
||||
"PYTHONPATH": os.pathsep.join(pythonpath),
|
||||
}
|
||||
)
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: Optional[Type[BaseException]],
|
||||
exc_val: Optional[BaseException],
|
||||
exc_tb: Optional[TracebackType],
|
||||
) -> None:
|
||||
for varname, old_value in self._save_env.items():
|
||||
if old_value is None:
|
||||
os.environ.pop(varname, None)
|
||||
else:
|
||||
os.environ[varname] = old_value
|
||||
|
||||
def check_requirements(
|
||||
self, reqs: Iterable[str]
|
||||
) -> Tuple[Set[Tuple[str, str]], Set[str]]:
|
||||
"""Return 2 sets:
|
||||
- conflicting requirements: set of (installed, wanted) reqs tuples
|
||||
- missing requirements: set of reqs
|
||||
"""
|
||||
missing = set()
|
||||
conflicting = set()
|
||||
if reqs:
|
||||
env = (
|
||||
get_environment(self._lib_dirs)
|
||||
if hasattr(self, "_lib_dirs")
|
||||
else get_default_environment()
|
||||
)
|
||||
for req_str in reqs:
|
||||
req = get_requirement(req_str)
|
||||
# We're explicitly evaluating with an empty extra value, since build
|
||||
# environments are not provided any mechanism to select specific extras.
|
||||
if req.marker is not None and not req.marker.evaluate({"extra": ""}):
|
||||
continue
|
||||
dist = env.get_distribution(req.name)
|
||||
if not dist:
|
||||
missing.add(req_str)
|
||||
continue
|
||||
if isinstance(dist.version, Version):
|
||||
installed_req_str = f"{req.name}=={dist.version}"
|
||||
else:
|
||||
installed_req_str = f"{req.name}==={dist.version}"
|
||||
if not req.specifier.contains(dist.version, prereleases=True):
|
||||
conflicting.add((installed_req_str, req_str))
|
||||
# FIXME: Consider direct URL?
|
||||
return conflicting, missing
|
||||
|
||||
def install_requirements(
|
||||
self,
|
||||
finder: "PackageFinder",
|
||||
requirements: Iterable[str],
|
||||
prefix_as_string: str,
|
||||
*,
|
||||
kind: str,
|
||||
) -> None:
|
||||
prefix = self._prefixes[prefix_as_string]
|
||||
assert not prefix.setup
|
||||
prefix.setup = True
|
||||
if not requirements:
|
||||
return
|
||||
self._install_requirements(
|
||||
get_runnable_pip(),
|
||||
finder,
|
||||
requirements,
|
||||
prefix,
|
||||
kind=kind,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _install_requirements(
|
||||
pip_runnable: str,
|
||||
finder: "PackageFinder",
|
||||
requirements: Iterable[str],
|
||||
prefix: _Prefix,
|
||||
*,
|
||||
kind: str,
|
||||
) -> None:
|
||||
args: List[str] = [
|
||||
sys.executable,
|
||||
pip_runnable,
|
||||
"install",
|
||||
"--ignore-installed",
|
||||
"--no-user",
|
||||
"--prefix",
|
||||
prefix.path,
|
||||
"--no-warn-script-location",
|
||||
"--disable-pip-version-check",
|
||||
# The prefix specified two lines above, thus
|
||||
# target from config file or env var should be ignored
|
||||
"--target",
|
||||
"",
|
||||
]
|
||||
if logger.getEffectiveLevel() <= logging.DEBUG:
|
||||
args.append("-vv")
|
||||
elif logger.getEffectiveLevel() <= VERBOSE:
|
||||
args.append("-v")
|
||||
for format_control in ("no_binary", "only_binary"):
|
||||
formats = getattr(finder.format_control, format_control)
|
||||
args.extend(
|
||||
(
|
||||
"--" + format_control.replace("_", "-"),
|
||||
",".join(sorted(formats or {":none:"})),
|
||||
)
|
||||
)
|
||||
|
||||
index_urls = finder.index_urls
|
||||
if index_urls:
|
||||
args.extend(["-i", index_urls[0]])
|
||||
for extra_index in index_urls[1:]:
|
||||
args.extend(["--extra-index-url", extra_index])
|
||||
else:
|
||||
args.append("--no-index")
|
||||
for link in finder.find_links:
|
||||
args.extend(["--find-links", link])
|
||||
|
||||
if finder.proxy:
|
||||
args.extend(["--proxy", finder.proxy])
|
||||
for host in finder.trusted_hosts:
|
||||
args.extend(["--trusted-host", host])
|
||||
if finder.custom_cert:
|
||||
args.extend(["--cert", finder.custom_cert])
|
||||
if finder.client_cert:
|
||||
args.extend(["--client-cert", finder.client_cert])
|
||||
if finder.allow_all_prereleases:
|
||||
args.append("--pre")
|
||||
if finder.prefer_binary:
|
||||
args.append("--prefer-binary")
|
||||
args.append("--")
|
||||
args.extend(requirements)
|
||||
with open_spinner(f"Installing {kind}") as spinner:
|
||||
call_subprocess(
|
||||
args,
|
||||
command_desc=f"pip subprocess to install {kind}",
|
||||
spinner=spinner,
|
||||
)
|
||||
|
||||
|
||||
class NoOpBuildEnvironment(BuildEnvironment):
|
||||
"""A no-op drop-in replacement for BuildEnvironment"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
pass
|
||||
|
||||
def __enter__(self) -> None:
|
||||
pass
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: Optional[Type[BaseException]],
|
||||
exc_val: Optional[BaseException],
|
||||
exc_tb: Optional[TracebackType],
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
def cleanup(self) -> None:
|
||||
pass
|
||||
|
||||
def install_requirements(
|
||||
self,
|
||||
finder: "PackageFinder",
|
||||
requirements: Iterable[str],
|
||||
prefix_as_string: str,
|
||||
*,
|
||||
kind: str,
|
||||
) -> None:
|
||||
raise NotImplementedError()
|
290
Dependencies/Python/Lib/site-packages/pip/_internal/cache.py
vendored
Normal file
290
Dependencies/Python/Lib/site-packages/pip/_internal/cache.py
vendored
Normal file
@ -0,0 +1,290 @@
|
||||
"""Cache Management
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
|
||||
from pip._internal.exceptions import InvalidWheelFilename
|
||||
from pip._internal.models.direct_url import DirectUrl
|
||||
from pip._internal.models.link import Link
|
||||
from pip._internal.models.wheel import Wheel
|
||||
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
|
||||
from pip._internal.utils.urls import path_to_url
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
ORIGIN_JSON_NAME = "origin.json"
|
||||
|
||||
|
||||
def _hash_dict(d: Dict[str, str]) -> str:
|
||||
"""Return a stable sha224 of a dictionary."""
|
||||
s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
|
||||
return hashlib.sha224(s.encode("ascii")).hexdigest()
|
||||
|
||||
|
||||
class Cache:
|
||||
"""An abstract class - provides cache directories for data from links
|
||||
|
||||
:param cache_dir: The root of the cache.
|
||||
"""
|
||||
|
||||
def __init__(self, cache_dir: str) -> None:
|
||||
super().__init__()
|
||||
assert not cache_dir or os.path.isabs(cache_dir)
|
||||
self.cache_dir = cache_dir or None
|
||||
|
||||
def _get_cache_path_parts(self, link: Link) -> List[str]:
|
||||
"""Get parts of part that must be os.path.joined with cache_dir"""
|
||||
|
||||
# We want to generate an url to use as our cache key, we don't want to
|
||||
# just reuse the URL because it might have other items in the fragment
|
||||
# and we don't care about those.
|
||||
key_parts = {"url": link.url_without_fragment}
|
||||
if link.hash_name is not None and link.hash is not None:
|
||||
key_parts[link.hash_name] = link.hash
|
||||
if link.subdirectory_fragment:
|
||||
key_parts["subdirectory"] = link.subdirectory_fragment
|
||||
|
||||
# Include interpreter name, major and minor version in cache key
|
||||
# to cope with ill-behaved sdists that build a different wheel
|
||||
# depending on the python version their setup.py is being run on,
|
||||
# and don't encode the difference in compatibility tags.
|
||||
# https://github.com/pypa/pip/issues/7296
|
||||
key_parts["interpreter_name"] = interpreter_name()
|
||||
key_parts["interpreter_version"] = interpreter_version()
|
||||
|
||||
# Encode our key url with sha224, we'll use this because it has similar
|
||||
# security properties to sha256, but with a shorter total output (and
|
||||
# thus less secure). However the differences don't make a lot of
|
||||
# difference for our use case here.
|
||||
hashed = _hash_dict(key_parts)
|
||||
|
||||
# We want to nest the directories some to prevent having a ton of top
|
||||
# level directories where we might run out of sub directories on some
|
||||
# FS.
|
||||
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
|
||||
|
||||
return parts
|
||||
|
||||
def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
|
||||
can_not_cache = not self.cache_dir or not canonical_package_name or not link
|
||||
if can_not_cache:
|
||||
return []
|
||||
|
||||
path = self.get_path_for_link(link)
|
||||
if os.path.isdir(path):
|
||||
return [(candidate, path) for candidate in os.listdir(path)]
|
||||
return []
|
||||
|
||||
def get_path_for_link(self, link: Link) -> str:
|
||||
"""Return a directory to store cached items in for link."""
|
||||
raise NotImplementedError()
|
||||
|
||||
def get(
|
||||
self,
|
||||
link: Link,
|
||||
package_name: Optional[str],
|
||||
supported_tags: List[Tag],
|
||||
) -> Link:
|
||||
"""Returns a link to a cached item if it exists, otherwise returns the
|
||||
passed link.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class SimpleWheelCache(Cache):
|
||||
"""A cache of wheels for future installs."""
|
||||
|
||||
def __init__(self, cache_dir: str) -> None:
|
||||
super().__init__(cache_dir)
|
||||
|
||||
def get_path_for_link(self, link: Link) -> str:
|
||||
"""Return a directory to store cached wheels for link
|
||||
|
||||
Because there are M wheels for any one sdist, we provide a directory
|
||||
to cache them in, and then consult that directory when looking up
|
||||
cache hits.
|
||||
|
||||
We only insert things into the cache if they have plausible version
|
||||
numbers, so that we don't contaminate the cache with things that were
|
||||
not unique. E.g. ./package might have dozens of installs done for it
|
||||
and build a version of 0.0...and if we built and cached a wheel, we'd
|
||||
end up using the same wheel even if the source has been edited.
|
||||
|
||||
:param link: The link of the sdist for which this will cache wheels.
|
||||
"""
|
||||
parts = self._get_cache_path_parts(link)
|
||||
assert self.cache_dir
|
||||
# Store wheels within the root cache_dir
|
||||
return os.path.join(self.cache_dir, "wheels", *parts)
|
||||
|
||||
def get(
|
||||
self,
|
||||
link: Link,
|
||||
package_name: Optional[str],
|
||||
supported_tags: List[Tag],
|
||||
) -> Link:
|
||||
candidates = []
|
||||
|
||||
if not package_name:
|
||||
return link
|
||||
|
||||
canonical_package_name = canonicalize_name(package_name)
|
||||
for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
|
||||
try:
|
||||
wheel = Wheel(wheel_name)
|
||||
except InvalidWheelFilename:
|
||||
continue
|
||||
if canonicalize_name(wheel.name) != canonical_package_name:
|
||||
logger.debug(
|
||||
"Ignoring cached wheel %s for %s as it "
|
||||
"does not match the expected distribution name %s.",
|
||||
wheel_name,
|
||||
link,
|
||||
package_name,
|
||||
)
|
||||
continue
|
||||
if not wheel.supported(supported_tags):
|
||||
# Built for a different python/arch/etc
|
||||
continue
|
||||
candidates.append(
|
||||
(
|
||||
wheel.support_index_min(supported_tags),
|
||||
wheel_name,
|
||||
wheel_dir,
|
||||
)
|
||||
)
|
||||
|
||||
if not candidates:
|
||||
return link
|
||||
|
||||
_, wheel_name, wheel_dir = min(candidates)
|
||||
return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))
|
||||
|
||||
|
||||
class EphemWheelCache(SimpleWheelCache):
|
||||
"""A SimpleWheelCache that creates it's own temporary cache directory"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._temp_dir = TempDirectory(
|
||||
kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
|
||||
globally_managed=True,
|
||||
)
|
||||
|
||||
super().__init__(self._temp_dir.path)
|
||||
|
||||
|
||||
class CacheEntry:
|
||||
def __init__(
|
||||
self,
|
||||
link: Link,
|
||||
persistent: bool,
|
||||
):
|
||||
self.link = link
|
||||
self.persistent = persistent
|
||||
self.origin: Optional[DirectUrl] = None
|
||||
origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
|
||||
if origin_direct_url_path.exists():
|
||||
try:
|
||||
self.origin = DirectUrl.from_json(
|
||||
origin_direct_url_path.read_text(encoding="utf-8")
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"Ignoring invalid cache entry origin file %s for %s (%s)",
|
||||
origin_direct_url_path,
|
||||
link.filename,
|
||||
e,
|
||||
)
|
||||
|
||||
|
||||
class WheelCache(Cache):
|
||||
"""Wraps EphemWheelCache and SimpleWheelCache into a single Cache
|
||||
|
||||
This Cache allows for gracefully degradation, using the ephem wheel cache
|
||||
when a certain link is not found in the simple wheel cache first.
|
||||
"""
|
||||
|
||||
def __init__(self, cache_dir: str) -> None:
|
||||
super().__init__(cache_dir)
|
||||
self._wheel_cache = SimpleWheelCache(cache_dir)
|
||||
self._ephem_cache = EphemWheelCache()
|
||||
|
||||
def get_path_for_link(self, link: Link) -> str:
|
||||
return self._wheel_cache.get_path_for_link(link)
|
||||
|
||||
def get_ephem_path_for_link(self, link: Link) -> str:
|
||||
return self._ephem_cache.get_path_for_link(link)
|
||||
|
||||
def get(
|
||||
self,
|
||||
link: Link,
|
||||
package_name: Optional[str],
|
||||
supported_tags: List[Tag],
|
||||
) -> Link:
|
||||
cache_entry = self.get_cache_entry(link, package_name, supported_tags)
|
||||
if cache_entry is None:
|
||||
return link
|
||||
return cache_entry.link
|
||||
|
||||
def get_cache_entry(
|
||||
self,
|
||||
link: Link,
|
||||
package_name: Optional[str],
|
||||
supported_tags: List[Tag],
|
||||
) -> Optional[CacheEntry]:
|
||||
"""Returns a CacheEntry with a link to a cached item if it exists or
|
||||
None. The cache entry indicates if the item was found in the persistent
|
||||
or ephemeral cache.
|
||||
"""
|
||||
retval = self._wheel_cache.get(
|
||||
link=link,
|
||||
package_name=package_name,
|
||||
supported_tags=supported_tags,
|
||||
)
|
||||
if retval is not link:
|
||||
return CacheEntry(retval, persistent=True)
|
||||
|
||||
retval = self._ephem_cache.get(
|
||||
link=link,
|
||||
package_name=package_name,
|
||||
supported_tags=supported_tags,
|
||||
)
|
||||
if retval is not link:
|
||||
return CacheEntry(retval, persistent=False)
|
||||
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
|
||||
origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
|
||||
if origin_path.exists():
|
||||
try:
|
||||
origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8"))
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"Could not read origin file %s in cache entry (%s). "
|
||||
"Will attempt to overwrite it.",
|
||||
origin_path,
|
||||
e,
|
||||
)
|
||||
else:
|
||||
# TODO: use DirectUrl.equivalent when
|
||||
# https://github.com/pypa/pip/pull/10564 is merged.
|
||||
if origin.url != download_info.url:
|
||||
logger.warning(
|
||||
"Origin URL %s in cache entry %s does not match download URL "
|
||||
"%s. This is likely a pip bug or a cache corruption issue. "
|
||||
"Will overwrite it with the new value.",
|
||||
origin.url,
|
||||
cache_dir,
|
||||
download_info.url,
|
||||
)
|
||||
origin_path.write_text(download_info.to_json(), encoding="utf-8")
|
4
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__init__.py
vendored
Normal file
4
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__init__.py
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
"""Subpackage containing all of pip's command line interface related code
|
||||
"""
|
||||
|
||||
# This file intentionally does not import submodules
|
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/index_command.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/index_command.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/main.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/main.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/parser.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/parser.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-313.pyc
vendored
Normal file
Binary file not shown.
176
Dependencies/Python/Lib/site-packages/pip/_internal/cli/autocompletion.py
vendored
Normal file
176
Dependencies/Python/Lib/site-packages/pip/_internal/cli/autocompletion.py
vendored
Normal file
@ -0,0 +1,176 @@
|
||||
"""Logic that powers autocompletion installed by ``pip completion``.
|
||||
"""
|
||||
|
||||
import optparse
|
||||
import os
|
||||
import sys
|
||||
from itertools import chain
|
||||
from typing import Any, Iterable, List, Optional
|
||||
|
||||
from pip._internal.cli.main_parser import create_main_parser
|
||||
from pip._internal.commands import commands_dict, create_command
|
||||
from pip._internal.metadata import get_default_environment
|
||||
|
||||
|
||||
def autocomplete() -> None:
|
||||
"""Entry Point for completion of main and subcommand options."""
|
||||
# Don't complete if user hasn't sourced bash_completion file.
|
||||
if "PIP_AUTO_COMPLETE" not in os.environ:
|
||||
return
|
||||
# Don't complete if autocompletion environment variables
|
||||
# are not present
|
||||
if not os.environ.get("COMP_WORDS") or not os.environ.get("COMP_CWORD"):
|
||||
return
|
||||
cwords = os.environ["COMP_WORDS"].split()[1:]
|
||||
cword = int(os.environ["COMP_CWORD"])
|
||||
try:
|
||||
current = cwords[cword - 1]
|
||||
except IndexError:
|
||||
current = ""
|
||||
|
||||
parser = create_main_parser()
|
||||
subcommands = list(commands_dict)
|
||||
options = []
|
||||
|
||||
# subcommand
|
||||
subcommand_name: Optional[str] = None
|
||||
for word in cwords:
|
||||
if word in subcommands:
|
||||
subcommand_name = word
|
||||
break
|
||||
# subcommand options
|
||||
if subcommand_name is not None:
|
||||
# special case: 'help' subcommand has no options
|
||||
if subcommand_name == "help":
|
||||
sys.exit(1)
|
||||
# special case: list locally installed dists for show and uninstall
|
||||
should_list_installed = not current.startswith("-") and subcommand_name in [
|
||||
"show",
|
||||
"uninstall",
|
||||
]
|
||||
if should_list_installed:
|
||||
env = get_default_environment()
|
||||
lc = current.lower()
|
||||
installed = [
|
||||
dist.canonical_name
|
||||
for dist in env.iter_installed_distributions(local_only=True)
|
||||
if dist.canonical_name.startswith(lc)
|
||||
and dist.canonical_name not in cwords[1:]
|
||||
]
|
||||
# if there are no dists installed, fall back to option completion
|
||||
if installed:
|
||||
for dist in installed:
|
||||
print(dist)
|
||||
sys.exit(1)
|
||||
|
||||
should_list_installables = (
|
||||
not current.startswith("-") and subcommand_name == "install"
|
||||
)
|
||||
if should_list_installables:
|
||||
for path in auto_complete_paths(current, "path"):
|
||||
print(path)
|
||||
sys.exit(1)
|
||||
|
||||
subcommand = create_command(subcommand_name)
|
||||
|
||||
for opt in subcommand.parser.option_list_all:
|
||||
if opt.help != optparse.SUPPRESS_HELP:
|
||||
options += [
|
||||
(opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts
|
||||
]
|
||||
|
||||
# filter out previously specified options from available options
|
||||
prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
|
||||
options = [(x, v) for (x, v) in options if x not in prev_opts]
|
||||
# filter options by current input
|
||||
options = [(k, v) for k, v in options if k.startswith(current)]
|
||||
# get completion type given cwords and available subcommand options
|
||||
completion_type = get_path_completion_type(
|
||||
cwords,
|
||||
cword,
|
||||
subcommand.parser.option_list_all,
|
||||
)
|
||||
# get completion files and directories if ``completion_type`` is
|
||||
# ``<file>``, ``<dir>`` or ``<path>``
|
||||
if completion_type:
|
||||
paths = auto_complete_paths(current, completion_type)
|
||||
options = [(path, 0) for path in paths]
|
||||
for option in options:
|
||||
opt_label = option[0]
|
||||
# append '=' to options which require args
|
||||
if option[1] and option[0][:2] == "--":
|
||||
opt_label += "="
|
||||
print(opt_label)
|
||||
else:
|
||||
# show main parser options only when necessary
|
||||
|
||||
opts = [i.option_list for i in parser.option_groups]
|
||||
opts.append(parser.option_list)
|
||||
flattened_opts = chain.from_iterable(opts)
|
||||
if current.startswith("-"):
|
||||
for opt in flattened_opts:
|
||||
if opt.help != optparse.SUPPRESS_HELP:
|
||||
subcommands += opt._long_opts + opt._short_opts
|
||||
else:
|
||||
# get completion type given cwords and all available options
|
||||
completion_type = get_path_completion_type(cwords, cword, flattened_opts)
|
||||
if completion_type:
|
||||
subcommands = list(auto_complete_paths(current, completion_type))
|
||||
|
||||
print(" ".join([x for x in subcommands if x.startswith(current)]))
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def get_path_completion_type(
    cwords: List[str], cword: int, opts: Iterable[Any]
) -> Optional[str]:
    """Get the type of path completion (``file``, ``dir``, ``path`` or None)

    :param cwords: same as the environmental variable ``COMP_WORDS``
    :param cword: same as the environmental variable ``COMP_CWORD``
    :param opts: The available options to check
    :return: path completion type (``file``, ``dir``, ``path`` or None)
    """
    # Path completion only makes sense right after an option flag.
    if cword < 2:
        return None
    previous_word = cwords[cword - 2]
    if not previous_word.startswith("-"):
        return None
    # Strip any "=value" part so "--log=foo" matches the option "--log".
    wanted = previous_word.split("=")[0]
    for opt in opts:
        if opt.help == optparse.SUPPRESS_HELP:
            continue
        # str(opt) renders as e.g. "-l/--log"; match either spelling.
        if wanted not in str(opt).split("/"):
            continue
        metavar = opt.metavar
        # An empty metavar is returned as-is (no path hint available).
        if not metavar:
            return metavar
        if any(part in ("path", "file", "dir") for part in metavar.split("/")):
            return metavar
    return None
|
||||
|
||||
|
||||
def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
    """If ``completion_type`` is ``file`` or ``path``, list all regular files
    and directories starting with ``current``; otherwise only list directories
    starting with ``current``.

    :param current: The word to be completed
    :param completion_type: path completion type(``file``, ``path`` or ``dir``)
    :return: A generator of regular files and/or directories
    """
    directory, partial_name = os.path.split(current)
    abs_directory = os.path.abspath(directory)
    # Don't complete paths if they can't be accessed
    if not os.access(abs_directory, os.R_OK):
        return
    prefix = os.path.normcase(partial_name)
    for entry in os.listdir(abs_directory):
        # Only keep entries that extend what the user has typed so far.
        if not os.path.normcase(entry).startswith(prefix):
            continue
        full_path = os.path.join(abs_directory, entry)
        # The completion is reported relative to the typed directory.
        candidate = os.path.normcase(os.path.join(directory, entry))
        if completion_type != "dir" and os.path.isfile(full_path):
            # complete regular files when there is not ``<dir>`` after option
            yield candidate
        elif os.path.isdir(full_path):
            # complete directories when there is ``<file>``, ``<path>`` or
            # ``<dir>`` after option; trailing separator signals a directory
            yield os.path.join(candidate, "")
|
240
Dependencies/Python/Lib/site-packages/pip/_internal/cli/base_command.py
vendored
Normal file
240
Dependencies/Python/Lib/site-packages/pip/_internal/cli/base_command.py
vendored
Normal file
@ -0,0 +1,240 @@
|
||||
"""Base Command class, and related routines"""
|
||||
|
||||
import logging
|
||||
import logging.config
|
||||
import optparse
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
from optparse import Values
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from pip._vendor.rich import reconfigure
|
||||
from pip._vendor.rich import traceback as rich_traceback
|
||||
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.command_context import CommandContextMixIn
|
||||
from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
|
||||
from pip._internal.cli.status_codes import (
|
||||
ERROR,
|
||||
PREVIOUS_BUILD_DIR_ERROR,
|
||||
UNKNOWN_ERROR,
|
||||
VIRTUALENV_NOT_FOUND,
|
||||
)
|
||||
from pip._internal.exceptions import (
|
||||
BadCommand,
|
||||
CommandError,
|
||||
DiagnosticPipError,
|
||||
InstallationError,
|
||||
NetworkConnectionError,
|
||||
PreviousBuildDirError,
|
||||
)
|
||||
from pip._internal.utils.deprecation import deprecated
|
||||
from pip._internal.utils.filesystem import check_path_owner
|
||||
from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
|
||||
from pip._internal.utils.misc import get_prog, normalize_path
|
||||
from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
|
||||
from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
|
||||
from pip._internal.utils.virtualenv import running_under_virtualenv
|
||||
|
||||
__all__ = ["Command"]
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Command(CommandContextMixIn):
    """Base class for all pip commands.

    Subclasses provide a docstring (used as the help description), add
    their options in ``add_options()``, and implement ``run()``.
    ``main()`` is the external entry point: it parses arguments, sets up
    logging and global state, runs the command, and maps exceptions to
    exit status codes.
    """

    # Usage template shown in --help; subclasses override.
    usage: str = ""
    # Set True in subclasses to opt out of the --require-virtualenv check.
    ignore_require_venv: bool = False

    def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
        """
        :param name: the subcommand name (e.g. "install")
        :param summary: one-line summary shown in the command listing
        :param isolated: when True, ignore user configuration files
        """
        super().__init__()

        self.name = name
        self.summary = summary
        self.parser = ConfigOptionParser(
            usage=self.usage,
            prog=f"{get_prog()} {name}",
            formatter=UpdatingDefaultsHelpFormatter(),
            add_help_option=False,
            name=name,
            description=self.__doc__,
            isolated=isolated,
        )

        # Populated in _main() once the temp-dir registry context is entered.
        self.tempdir_registry: Optional[TempDirRegistry] = None

        # Commands should add options to this option group
        optgroup_name = f"{self.name.capitalize()} Options"
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)

        # Add the general options
        gen_opts = cmdoptions.make_option_group(
            cmdoptions.general_group,
            self.parser,
        )
        self.parser.add_option_group(gen_opts)

        self.add_options()

    def add_options(self) -> None:
        # Hook for subclasses to populate self.cmd_opts; no-op by default.
        pass

    def handle_pip_version_check(self, options: Values) -> None:
        """
        This is a no-op so that commands by default do not do the pip version
        check.
        """
        # Make sure we do the pip version check if the index_group options
        # are present.
        assert not hasattr(options, "no_index")

    def run(self, options: Values, args: List[str]) -> int:
        """Perform the command's work; must return an exit status code."""
        raise NotImplementedError

    def _run_wrapper(self, level_number: int, options: Values, args: List[str]) -> int:
        """Run the command, translating known exceptions into exit codes.

        :param level_number: effective logging level; controls whether a
            traceback is printed when stdout breaks.
        """
        def _inner_run() -> int:
            try:
                return self.run(options, args)
            finally:
                # The version check runs even when the command fails.
                self.handle_pip_version_check(options)

        if options.debug_mode:
            # Debug mode: let exceptions propagate with a rich traceback
            # instead of being converted to exit codes.
            rich_traceback.install(show_locals=True)
            return _inner_run()

        try:
            status = _inner_run()
            assert isinstance(status, int)
            return status
        except DiagnosticPipError as exc:
            logger.error("%s", exc, extra={"rich": True})
            logger.debug("Exception information:", exc_info=True)

            return ERROR
        except PreviousBuildDirError as exc:
            logger.critical(str(exc))
            logger.debug("Exception information:", exc_info=True)

            return PREVIOUS_BUILD_DIR_ERROR
        except (
            InstallationError,
            BadCommand,
            NetworkConnectionError,
        ) as exc:
            logger.critical(str(exc))
            logger.debug("Exception information:", exc_info=True)

            return ERROR
        except CommandError as exc:
            logger.critical("%s", exc)
            logger.debug("Exception information:", exc_info=True)

            return ERROR
        except BrokenStdoutLoggingError:
            # Bypass our logger and write any remaining messages to
            # stderr because stdout no longer works.
            print("ERROR: Pipe to stdout was broken", file=sys.stderr)
            if level_number <= logging.DEBUG:
                traceback.print_exc(file=sys.stderr)

            return ERROR
        except KeyboardInterrupt:
            logger.critical("Operation cancelled by user")
            logger.debug("Exception information:", exc_info=True)

            return ERROR
        except BaseException:
            # Catch-all (including SystemExit subclasses raised oddly);
            # anything unexpected is an unknown error.
            logger.critical("Exception:", exc_info=True)

            return UNKNOWN_ERROR

    def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
        # factored out for testability
        return self.parser.parse_args(args)

    def main(self, args: List[str]) -> int:
        """Entry point: run the command inside its main context and make
        sure logging is flushed on the way out."""
        try:
            with self.main_context():
                return self._main(args)
        finally:
            logging.shutdown()

    def _main(self, args: List[str]) -> int:
        """Parse options, configure global state, then dispatch to
        ``_run_wrapper``. Ordering of the setup steps below matters."""
        # We must initialize this before the tempdir manager, otherwise the
        # configuration would not be accessible by the time we clean up the
        # tempdir manager.
        self.tempdir_registry = self.enter_context(tempdir_registry())
        # Intentionally set as early as possible so globally-managed temporary
        # directories are available to the rest of the code.
        self.enter_context(global_tempdir_manager())

        options, args = self.parse_args(args)

        # Set verbosity so that it can be used elsewhere.
        self.verbosity = options.verbose - options.quiet

        reconfigure(no_color=options.no_color)
        level_number = setup_logging(
            verbosity=self.verbosity,
            no_color=options.no_color,
            user_log_file=options.log,
        )

        always_enabled_features = set(options.features_enabled) & set(
            cmdoptions.ALWAYS_ENABLED_FEATURES
        )
        if always_enabled_features:
            logger.warning(
                "The following features are always enabled: %s. ",
                ", ".join(sorted(always_enabled_features)),
            )

        # Make sure that the --python argument isn't specified after the
        # subcommand. We can tell, because if --python was specified,
        # we should only reach this point if we're running in the created
        # subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment
        # variable set.
        if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
            logger.critical(
                "The --python option must be placed before the pip subcommand name"
            )
            sys.exit(ERROR)

        # TODO: Try to get these passing down from the command?
        # without resorting to os.environ to hold these.
        # This also affects isolated builds and it should.

        if options.no_input:
            os.environ["PIP_NO_INPUT"] = "1"

        if options.exists_action:
            os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)

        if options.require_venv and not self.ignore_require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical("Could not find an activated virtualenv (required).")
                sys.exit(VIRTUALENV_NOT_FOUND)

        if options.cache_dir:
            options.cache_dir = normalize_path(options.cache_dir)
            if not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "or is not writable by the current user. The cache "
                    "has been disabled. Check the permissions and owner of "
                    "that directory. If executing pip with sudo, you should "
                    "use sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

        if options.no_python_version_warning:
            deprecated(
                reason="--no-python-version-warning is deprecated.",
                replacement="to remove the flag as it's a no-op",
                gone_in="25.1",
                issue=13154,
            )

        return self._run_wrapper(level_number, options, args)
|
1075
Dependencies/Python/Lib/site-packages/pip/_internal/cli/cmdoptions.py
vendored
Normal file
1075
Dependencies/Python/Lib/site-packages/pip/_internal/cli/cmdoptions.py
vendored
Normal file
File diff suppressed because it is too large
Load Diff
27
Dependencies/Python/Lib/site-packages/pip/_internal/cli/command_context.py
vendored
Normal file
27
Dependencies/Python/Lib/site-packages/pip/_internal/cli/command_context.py
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
from contextlib import ExitStack, contextmanager
|
||||
from typing import ContextManager, Generator, TypeVar
|
||||
|
||||
_T = TypeVar("_T", covariant=True)
|
||||
|
||||
|
||||
class CommandContextMixIn:
    """Mixin giving a class a single ExitStack-backed "main" context.

    Resources registered via ``enter_context()`` stay alive until the
    ``main_context()`` block exits, at which point the stack unwinds them.
    """

    def __init__(self) -> None:
        super().__init__()
        # True only while main_context() is active; guards enter_context().
        self._in_main_context = False
        self._main_context = ExitStack()

    @contextmanager
    def main_context(self) -> Generator[None, None, None]:
        # Nesting would reuse the same ExitStack and release resources
        # when the inner context exits, so it is disallowed.
        assert not self._in_main_context

        self._in_main_context = True
        try:
            with self._main_context:
                yield
        finally:
            self._in_main_context = False

    def enter_context(self, context_provider: ContextManager[_T]) -> _T:
        # Only valid inside main_context(): the stack owns the resource's
        # lifetime and tears it down on context exit.
        assert self._in_main_context

        return self._main_context.enter_context(context_provider)
|
171
Dependencies/Python/Lib/site-packages/pip/_internal/cli/index_command.py
vendored
Normal file
171
Dependencies/Python/Lib/site-packages/pip/_internal/cli/index_command.py
vendored
Normal file
@ -0,0 +1,171 @@
|
||||
"""
|
||||
Contains command classes which may interact with an index / the network.
|
||||
|
||||
Unlike its sister module, req_command, this module still uses lazy imports
|
||||
so commands which don't always hit the network (e.g. list w/o --outdated or
|
||||
--uptodate) don't need waste time importing PipSession and friends.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from optparse import Values
|
||||
from typing import TYPE_CHECKING, List, Optional
|
||||
|
||||
from pip._vendor import certifi
|
||||
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.command_context import CommandContextMixIn
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ssl import SSLContext
|
||||
|
||||
from pip._internal.network.session import PipSession
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _create_truststore_ssl_context() -> Optional["SSLContext"]:
    """Build an SSLContext backed by the OS trust store, if possible.

    Returns None when the runtime cannot support truststore (older
    Python, no ssl module, or no vendored truststore), in which case the
    caller falls back to its default certificate handling.
    """
    if sys.version_info < (3, 10):
        # truststore requires Python 3.10+.
        logger.debug("Disabling truststore because Python version isn't 3.10+")
        return None

    try:
        import ssl
    except ImportError:
        logger.warning("Disabling truststore since ssl support is missing")
        return None

    try:
        from pip._vendor import truststore
    except ImportError:
        logger.warning("Disabling truststore because platform isn't supported")
        return None

    ctx = truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    # Also trust certifi's bundled CAs in addition to the system store.
    ctx.load_verify_locations(certifi.where())
    return ctx
|
||||
|
||||
|
||||
class SessionCommandMixin(CommandContextMixIn):
    """
    A class mixin for command classes needing _build_session().
    """

    def __init__(self) -> None:
        super().__init__()
        # Lazily created by get_default_session().
        self._session: Optional[PipSession] = None

    @classmethod
    def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
        """Return a list of index urls from user-provided options."""
        index_urls = []
        # --no-index suppresses the primary index URL but not extras.
        if not getattr(options, "no_index", False):
            url = getattr(options, "index_url", None)
            if url:
                index_urls.append(url)
        urls = getattr(options, "extra_index_urls", None)
        if urls:
            index_urls.extend(urls)
        # Return None rather than an empty list
        return index_urls or None

    def get_default_session(self, options: Values) -> "PipSession":
        """Get a default-managed session."""
        if self._session is None:
            # The session is registered on the main context so it is
            # closed when the command finishes.
            self._session = self.enter_context(self._build_session(options))
            # there's no type annotation on requests.Session, so it's
            # automatically ContextManager[Any] and self._session becomes Any,
            # then https://github.com/python/mypy/issues/7696 kicks in
            assert self._session is not None
        return self._session

    def _build_session(
        self,
        options: Values,
        retries: Optional[int] = None,
        timeout: Optional[int] = None,
    ) -> "PipSession":
        """Construct a new PipSession from CLI options.

        :param retries: overrides ``options.retries`` when not None
        :param timeout: overrides ``options.timeout`` when not None
        """
        # Imported lazily; this module avoids importing PipSession at the
        # top level (see module docstring).
        from pip._internal.network.session import PipSession

        cache_dir = options.cache_dir
        assert not cache_dir or os.path.isabs(cache_dir)

        # Use the OS trust store unless the user opted into legacy certs.
        if "legacy-certs" not in options.deprecated_features_enabled:
            ssl_context = _create_truststore_ssl_context()
        else:
            ssl_context = None

        session = PipSession(
            cache=os.path.join(cache_dir, "http-v2") if cache_dir else None,
            retries=retries if retries is not None else options.retries,
            trusted_hosts=options.trusted_hosts,
            index_urls=self._get_index_urls(options),
            ssl_context=ssl_context,
        )

        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert

        # Handle SSL client certificate
        if options.client_cert:
            session.cert = options.client_cert

        # Handle timeouts
        if options.timeout or timeout:
            session.timeout = timeout if timeout is not None else options.timeout

        # Handle configured proxies
        if options.proxy:
            session.proxies = {
                "http": options.proxy,
                "https": options.proxy,
            }
            session.trust_env = False
            session.pip_proxy = options.proxy

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input
        session.auth.keyring_provider = options.keyring_provider

        return session
|
||||
|
||||
|
||||
def _pip_self_version_check(session: "PipSession", options: Values) -> None:
    """Run the "is pip outdated?" check using the given session."""
    # Imported lazily so commands that never hit the network don't pay
    # the import cost (see module docstring).
    from pip._internal.self_outdated_check import pip_self_version_check as check

    check(session, options)
|
||||
|
||||
|
||||
class IndexGroupCommand(Command, SessionCommandMixin):
    """
    Abstract base class for commands with the index_group options.

    This also corresponds to the commands that permit the pip version check.
    """

    def handle_pip_version_check(self, options: Values) -> None:
        """
        Do the pip version check if not disabled.

        This overrides the default behavior of not doing the check.
        """
        # Make sure the index_group options are present.
        assert hasattr(options, "no_index")

        if options.disable_pip_version_check or options.no_index:
            return

        try:
            # Otherwise, check if we're using the latest version of pip available.
            # Use a short, non-retrying session: this is a best-effort check.
            session = self._build_session(
                options,
                retries=0,
                timeout=min(5, options.timeout),
            )
            with session:
                _pip_self_version_check(session, options)
        except Exception:
            # Never let the version check break the actual command.
            logger.warning("There was an error checking the latest version of pip.")
            logger.debug("See below for error", exc_info=True)
|
80
Dependencies/Python/Lib/site-packages/pip/_internal/cli/main.py
vendored
Normal file
80
Dependencies/Python/Lib/site-packages/pip/_internal/cli/main.py
vendored
Normal file
@ -0,0 +1,80 @@
|
||||
"""Primary application entrypoint.
|
||||
"""
|
||||
|
||||
import locale
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
from typing import List, Optional
|
||||
|
||||
from pip._internal.cli.autocompletion import autocomplete
|
||||
from pip._internal.cli.main_parser import parse_command
|
||||
from pip._internal.commands import create_command
|
||||
from pip._internal.exceptions import PipError
|
||||
from pip._internal.utils import deprecation
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Do not import and use main() directly! Using it directly is actively
|
||||
# discouraged by pip's maintainers. The name, location and behavior of
|
||||
# this function is subject to change, so calling it directly is not
|
||||
# portable across different pip versions.
|
||||
|
||||
# In addition, running pip in-process is unsupported and unsafe. This is
|
||||
# elaborated in detail at
|
||||
# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
|
||||
# That document also provides suggestions that should work for nearly
|
||||
# all users that are considering importing and using main() directly.
|
||||
|
||||
# However, we know that certain users will still want to invoke pip
|
||||
# in-process. If you understand and accept the implications of using pip
|
||||
# in an unsupported manner, the best approach is to use runpy to avoid
|
||||
# depending on the exact location of this entry point.
|
||||
|
||||
# The following example shows how to use runpy to invoke pip in that
|
||||
# case:
|
||||
#
|
||||
# sys.argv = ["pip", your, args, here]
|
||||
# runpy.run_module("pip", run_name="__main__")
|
||||
#
|
||||
# Note that this will exit the process after running, unlike a direct
|
||||
# call to main. As it is not safe to do any processing after calling
|
||||
# main, this should not be an issue in practice.
|
||||
|
||||
|
||||
def main(args: Optional[List[str]] = None) -> int:
    """Run pip: resolve the subcommand from ``args`` (defaulting to
    ``sys.argv[1:]``), construct it, and return its exit status."""
    if args is None:
        args = sys.argv[1:]

    # Suppress the pkg_resources deprecation warning
    # Note - we use a module of .*pkg_resources to cover
    # the normal case (pip._vendor.pkg_resources) and the
    # devendored case (a bare pkg_resources)
    warnings.filterwarnings(
        action="ignore", category=DeprecationWarning, module=".*pkg_resources"
    )

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    # Handles shell completion requests and exits early when one is present.
    autocomplete()

    try:
        cmd_name, cmd_args = parse_command(args)
    except PipError as exc:
        sys.stderr.write(f"ERROR: {exc}")
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, "")
    except locale.Error as e:
        # setlocale can apparently crash if locale are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)
    command = create_command(cmd_name, isolated=("--isolated" in cmd_args))

    return command.main(cmd_args)
|
134
Dependencies/Python/Lib/site-packages/pip/_internal/cli/main_parser.py
vendored
Normal file
134
Dependencies/Python/Lib/site-packages/pip/_internal/cli/main_parser.py
vendored
Normal file
@ -0,0 +1,134 @@
|
||||
"""A single place for constructing and exposing the main parser
|
||||
"""
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from pip._internal.build_env import get_runnable_pip
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
|
||||
from pip._internal.commands import commands_dict, get_similar_commands
|
||||
from pip._internal.exceptions import CommandError
|
||||
from pip._internal.utils.misc import get_pip_version, get_prog
|
||||
|
||||
__all__ = ["create_main_parser", "parse_command"]
|
||||
|
||||
|
||||
def create_main_parser() -> ConfigOptionParser:
    """Build the top-level option parser for pip's CLI.

    The parser carries only the general options plus a description
    listing every registered subcommand; subcommand-specific options are
    parsed later by the subcommand's own parser.
    """
    main_parser = ConfigOptionParser(
        usage="\n%prog <command> [options]",
        add_help_option=False,
        formatter=UpdatingDefaultsHelpFormatter(),
        name="global",
        prog=get_prog(),
    )
    # Stop parsing at the first positional so subcommand flags are left
    # untouched for the subcommand parser.
    main_parser.disable_interspersed_args()
    main_parser.version = get_pip_version()

    # Attach the shared "general options" group.
    main_parser.add_option_group(
        cmdoptions.make_option_group(cmdoptions.general_group, main_parser)
    )

    # Marker consulted by the help formatter so it labels the description
    # block "Commands" instead of "Description".
    main_parser.main = True  # type: ignore

    # One "name  summary" row per registered subcommand, preceded by a
    # blank line.
    command_rows = [
        f"{name:27} {info.summary}" for name, info in commands_dict.items()
    ]
    main_parser.description = "\n".join([""] + command_rows)

    return main_parser
|
||||
|
||||
|
||||
def identify_python_interpreter(python: str) -> Optional[str]:
    """Resolve a ``--python`` value to a concrete interpreter path.

    ``python`` may name the interpreter executable itself, or a virtual
    environment directory whose interpreter is then located. Returns
    None when no interpreter can be found.
    """
    if not os.path.exists(python):
        # Could not find the interpreter specified
        return None
    if not os.path.isdir(python):
        # An existing file: assume it is the interpreter itself.
        return python
    # A directory: assume a virtual environment and look for its Python.
    # bin/python for Unix, Scripts/python.exe for Windows.
    # Try both in case of odd cases like cygwin.
    for relative in ("bin/python", "Scripts/python.exe"):
        candidate = os.path.join(python, relative)
        if os.path.exists(candidate):
            return candidate
    return None
|
||||
|
||||
|
||||
def parse_command(args: List[str]) -> Tuple[str, List[str]]:
    """Split ``args`` into a subcommand name and the remaining arguments.

    Also handles the global early-exit paths: ``--python`` (re-invokes
    pip under the chosen interpreter and exits with its status),
    ``--version``, and bare ``pip`` / ``pip help`` (prints help).

    :raises CommandError: if the subcommand is unknown or the ``--python``
        interpreter cannot be located or run.
    """
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout==5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --python
    if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
        # Re-invoke pip using the specified Python interpreter
        interpreter = identify_python_interpreter(general_options.python)
        if interpreter is None:
            raise CommandError(
                f"Could not locate Python interpreter {general_options.python}"
            )

        pip_cmd = [
            interpreter,
            get_runnable_pip(),
        ]
        pip_cmd.extend(args)

        # Set a flag so the child doesn't re-invoke itself, causing
        # an infinite loop.
        os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"
        returncode = 0
        try:
            proc = subprocess.run(pip_cmd)
            returncode = proc.returncode
        except (subprocess.SubprocessError, OSError) as exc:
            raise CommandError(f"Failed to run pip under {interpreter}: {exc}")
        # Propagate the child's exit status as our own.
        sys.exit(returncode)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == "help" and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        msg = [f'unknown command "{cmd_name}"']
        if guess:
            msg.append(f'maybe you meant "{guess}"')

        raise CommandError(" - ".join(msg))

    # all the args without the subcommand
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args
|
294
Dependencies/Python/Lib/site-packages/pip/_internal/cli/parser.py
vendored
Normal file
294
Dependencies/Python/Lib/site-packages/pip/_internal/cli/parser.py
vendored
Normal file
@ -0,0 +1,294 @@
|
||||
"""Base option parser setup"""
|
||||
|
||||
import logging
|
||||
import optparse
|
||||
import shutil
|
||||
import sys
|
||||
import textwrap
|
||||
from contextlib import suppress
|
||||
from typing import Any, Dict, Generator, List, NoReturn, Optional, Tuple
|
||||
|
||||
from pip._internal.cli.status_codes import UNKNOWN_ERROR
|
||||
from pip._internal.configuration import Configuration, ConfigurationError
|
||||
from pip._internal.utils.misc import redact_auth_from_url, strtobool
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # help position must be aligned with __init__.parseopts.description
        kwargs["max_help_position"] = 30
        kwargs["indent_increment"] = 1
        # Leave a 2-column margin so wrapped lines fit the terminal.
        kwargs["width"] = shutil.get_terminal_size()[0] - 2
        super().__init__(*args, **kwargs)

    def format_option_strings(self, option: optparse.Option) -> str:
        return self._format_option_strings(option)

    def _format_option_strings(
        self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
    ) -> str:
        """
        Return a comma-separated list of option strings and metavars.

        :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string
        :param optsep: separator
        """
        opts = []

        # Show at most one short and one long spelling.
        if option._short_opts:
            opts.append(option._short_opts[0])
        if option._long_opts:
            opts.append(option._long_opts[0])
        if len(opts) > 1:
            opts.insert(1, optsep)

        if option.takes_value():
            assert option.dest is not None
            metavar = option.metavar or option.dest.lower()
            opts.append(mvarfmt.format(metavar.lower()))

        return "".join(opts)

    def format_heading(self, heading: str) -> str:
        # The default "Options" heading is redundant; drop it.
        if heading == "Options":
            return ""
        return heading + ":\n"

    def format_usage(self, usage: str) -> str:
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), "  "))
        return msg

    def format_description(self, description: Optional[str]) -> str:
        # leave full control over description to us
        if description:
            # The main parser sets a "main" attribute (see create_main_parser),
            # which turns the description block into the command listing.
            if hasattr(self.parser, "main"):
                label = "Commands"
            else:
                label = "Description"
            # some doc strings have initial newlines, some don't
            description = description.lstrip("\n")
            # some doc strings have final newlines and spaces, some don't
            description = description.rstrip()
            # dedent, then reindent
            description = self.indent_lines(textwrap.dedent(description), "  ")
            description = f"{label}:\n{description}\n"
            return description
        else:
            return ""

    def format_epilog(self, epilog: Optional[str]) -> str:
        # leave full control over epilog to us
        if epilog:
            return epilog
        else:
            return ""

    def indent_lines(self, text: str, indent: str) -> str:
        # Prefix every line (including blanks) with the given indent.
        new_lines = [indent + line for line in text.split("\n")]
        return "\n".join(new_lines)
|
||||
|
||||
|
||||
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.

    This is updates the defaults before expanding them, allowing
    them to show up correctly in the help listing.

    Also redact auth from url type options
    """

    def expand_default(self, option: optparse.Option) -> str:
        default_values = None
        if self.parser is not None:
            assert isinstance(self.parser, ConfigOptionParser)
            # Pull config-file/env overrides into the defaults before
            # optparse substitutes them into the help text.
            self.parser._update_defaults(self.parser.defaults)
            assert option.dest is not None
            default_values = self.parser.defaults.get(option.dest)
        help_text = super().expand_default(option)

        # Redact credentials embedded in URL-valued defaults.
        if default_values and option.metavar == "URL":
            if isinstance(default_values, str):
                default_values = [default_values]

            # If its not a list, we should abort and just return the help text
            if not isinstance(default_values, list):
                default_values = []

            for val in default_values:
                help_text = help_text.replace(val, redact_auth_from_url(val))

        return help_text
|
||||
|
||||
|
||||
class CustomOptionParser(optparse.OptionParser):
    """optparse.OptionParser extended with group-position control and a
    flattened view over every registered option."""

    def insert_option_group(
        self, idx: int, *args: Any, **kwargs: Any
    ) -> optparse.OptionGroup:
        """Insert an OptionGroup at a given position.

        optparse can only append groups, so register it normally and then
        move it from the end of the list to the requested index.
        """
        group = self.add_option_group(*args, **kwargs)
        self.option_groups.insert(idx, self.option_groups.pop())
        return group

    @property
    def option_list_all(self) -> List[optparse.Option]:
        """All options: top-level ones first, then each group's options
        in group order."""
        flattened = list(self.option_list)
        for opt_group in self.option_groups:
            flattened.extend(opt_group.option_list)
        return flattened
|
||||
|
||||
|
||||
class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(
        self,
        *args: Any,
        name: str,
        isolated: bool = False,
        **kwargs: Any,
    ) -> None:
        # name selects which config section (besides [global]) applies;
        # isolated=True makes Configuration ignore user config files.
        self.name = name
        self.config = Configuration(isolated)

        assert self.name
        super().__init__(*args, **kwargs)

    def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
        """Validate a configured value via the option's own checker.

        On an invalid value, print the error and exit with status 3 rather
        than raising, since this runs while loading configuration.
        """
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print(f"An error occurred during configuration: {exc}")
            sys.exit(3)

    def _get_ordered_configuration_items(
        self,
    ) -> Generator[Tuple[str, Any], None, None]:
        # Configuration gives keys in an unordered manner. Order them.
        # Later sections override earlier ones: [global] < [<command>] < env.
        override_order = ["global", self.name, ":env:"]

        # Pool the options into different groups
        section_items: Dict[str, List[Tuple[str, Any]]] = {
            name: [] for name in override_order
        }
        for section_key, val in self.config.items():
            # ignore empty values
            if not val:
                logger.debug(
                    "Ignoring configuration key '%s' as it's value is empty.",
                    section_key,
                )
                continue

            # section_key is "<section>.<key>"; split only on the first dot.
            section, key = section_key.split(".", 1)
            if section in override_order:
                section_items[section].append((key, val))

        # Yield each group in their override order
        for section in override_order:
            for key, val in section_items[section]:
                yield key, val

    def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""

        # Accumulate complex default state.
        # self.values is consulted by callback options below and cleared at
        # the end of this method.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in self._get_ordered_configuration_items():
            # '--' because configuration supports only long names
            option = self.get_option("--" + key)

            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            assert option.dest is not None

            if option.action in ("store_true", "store_false"):
                try:
                    val = strtobool(val)
                except ValueError:
                    self.error(
                        f"{val} is not a valid value for {key} option, "
                        "please specify a boolean value like yes/no, "
                        "true/false or 1/0 instead."
                    )
            elif option.action == "count":
                # Accept either a boolean-like string or a non-negative int.
                with suppress(ValueError):
                    val = strtobool(val)
                with suppress(ValueError):
                    val = int(val)
                if not isinstance(val, int) or val < 0:
                    self.error(
                        f"{val} is not a valid value for {key} option, "
                        "please instead specify either a non-negative integer "
                        "or a boolean value like yes/no or false/true "
                        "which is equivalent to 1/0."
                    )
            elif option.action == "append":
                # List-valued options are whitespace-separated in config.
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == "callback":
                # Run the callback now; its effect lands on self.values, so
                # the final value is read back after the loop (late_eval).
                assert option.callback is not None
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def get_default_values(self) -> optparse.Values:
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        # Load the configuration, or error out in case of an error
        try:
            self.config.load()
        except ConfigurationError as err:
            self.exit(UNKNOWN_ERROR, str(err))

        defaults = self._update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            assert option.dest is not None
            default = defaults.get(option.dest)
            if isinstance(default, str):
                # Run string defaults through the option's value checker so
                # they get the same coercion as command-line input.
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg: str) -> NoReturn:
        """Print usage to stderr and exit with UNKNOWN_ERROR."""
        self.print_usage(sys.stderr)
        self.exit(UNKNOWN_ERROR, f"{msg}\n")
|
94
Dependencies/Python/Lib/site-packages/pip/_internal/cli/progress_bars.py
vendored
Normal file
94
Dependencies/Python/Lib/site-packages/pip/_internal/cli/progress_bars.py
vendored
Normal file
@ -0,0 +1,94 @@
|
||||
import functools
|
||||
import sys
|
||||
from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple
|
||||
|
||||
from pip._vendor.rich.progress import (
|
||||
BarColumn,
|
||||
DownloadColumn,
|
||||
FileSizeColumn,
|
||||
Progress,
|
||||
ProgressColumn,
|
||||
SpinnerColumn,
|
||||
TextColumn,
|
||||
TimeElapsedColumn,
|
||||
TimeRemainingColumn,
|
||||
TransferSpeedColumn,
|
||||
)
|
||||
|
||||
from pip._internal.cli.spinners import RateLimiter
|
||||
from pip._internal.utils.logging import get_indentation
|
||||
|
||||
DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]
|
||||
|
||||
|
||||
def _rich_progress_bar(
    iterable: Iterable[bytes],
    *,
    bar_type: str,
    size: Optional[int],
) -> Generator[bytes, None, None]:
    """Yield chunks from *iterable* while rendering a rich progress display."""
    assert bar_type == "on", "This should only be used in the default mode."

    if size:
        total: float = size
        columns: Tuple[ProgressColumn, ...] = (
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            DownloadColumn(),
            TransferSpeedColumn(),
            TextColumn("eta"),
            TimeRemainingColumn(),
        )
    else:
        # Unknown length: show a spinner plus running totals instead of a bar.
        total = float("inf")
        columns = (
            TextColumn("[progress.description]{task.description}"),
            SpinnerColumn("line", speed=1.5),
            FileSizeColumn(),
            TransferSpeedColumn(),
            TimeElapsedColumn(),
        )

    progress = Progress(*columns, refresh_per_second=5)
    task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
    with progress:
        for chunk in iterable:
            yield chunk
            progress.update(task_id, advance=len(chunk))
|
||||
|
||||
|
||||
def _raw_progress_bar(
    iterable: Iterable[bytes],
    *,
    size: Optional[int],
) -> Generator[bytes, None, None]:
    """Yield chunks while printing plain "Progress X of Y" lines to stdout."""

    def emit(done: int, expected: int) -> None:
        sys.stdout.write(f"Progress {done} of {expected}\n")
        sys.stdout.flush()

    done = 0
    expected = size or 0
    throttle = RateLimiter(0.25)

    emit(done, expected)
    for chunk in iterable:
        done += len(chunk)
        # Rate-limit output, but always report when the final byte arrives.
        if throttle.ready() or done == expected:
            emit(done, expected)
            throttle.reset()
        yield chunk
|
||||
|
||||
|
||||
def get_download_progress_renderer(
    *, bar_type: str, size: Optional[int] = None
) -> DownloadProgressRenderer:
    """Get an object that can be used to render the download progress.

    Returns a callable, that takes an iterable to "wrap".
    """
    if bar_type == "on":
        return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
    if bar_type == "raw":
        return functools.partial(_raw_progress_bar, size=size)
    # Any other bar_type disables progress rendering entirely.
    return iter  # no-op, when passed an iterator
|
329
Dependencies/Python/Lib/site-packages/pip/_internal/cli/req_command.py
vendored
Normal file
329
Dependencies/Python/Lib/site-packages/pip/_internal/cli/req_command.py
vendored
Normal file
@ -0,0 +1,329 @@
|
||||
"""Contains the RequirementCommand base class.
|
||||
|
||||
This class is in a separate module so the commands that do not always
|
||||
need PackageFinder capability don't unnecessarily import the
|
||||
PackageFinder machinery and all its vendored dependencies, etc.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from functools import partial
|
||||
from optparse import Values
|
||||
from typing import Any, List, Optional, Tuple
|
||||
|
||||
from pip._internal.cache import WheelCache
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.index_command import IndexGroupCommand
|
||||
from pip._internal.cli.index_command import SessionCommandMixin as SessionCommandMixin
|
||||
from pip._internal.exceptions import CommandError, PreviousBuildDirError
|
||||
from pip._internal.index.collector import LinkCollector
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
from pip._internal.models.selection_prefs import SelectionPreferences
|
||||
from pip._internal.models.target_python import TargetPython
|
||||
from pip._internal.network.session import PipSession
|
||||
from pip._internal.operations.build.build_tracker import BuildTracker
|
||||
from pip._internal.operations.prepare import RequirementPreparer
|
||||
from pip._internal.req.constructors import (
|
||||
install_req_from_editable,
|
||||
install_req_from_line,
|
||||
install_req_from_parsed_requirement,
|
||||
install_req_from_req_string,
|
||||
)
|
||||
from pip._internal.req.req_file import parse_requirements
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
from pip._internal.resolution.base import BaseResolver
|
||||
from pip._internal.utils.temp_dir import (
|
||||
TempDirectory,
|
||||
TempDirectoryTypeRegistry,
|
||||
tempdir_kinds,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Temp-dir kinds that should be kept on disk (not deleted on exit) when
# --no-clean is given, or after a PreviousBuildDirError — see with_cleanup.
KEEPABLE_TEMPDIR_TYPES = [
    tempdir_kinds.BUILD_ENV,
    tempdir_kinds.EPHEM_WHEEL_CACHE,
    tempdir_kinds.REQ_BUILD,
]
|
||||
|
||||
|
||||
def with_cleanup(func: Any) -> Any:
    """Decorator for common logic related to managing temporary
    directories.

    Honors ``--no-clean`` by marking keepable temp-dir kinds as
    non-deletable, and preserves them on PreviousBuildDirError so a
    user-supplied pre-existing build directory is never removed.
    """
    # Local import: this module only imports `partial` from functools at
    # the top level.
    from functools import wraps

    def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
        # Mark every keepable temp-dir kind as "do not delete on exit".
        for t in KEEPABLE_TEMPDIR_TYPES:
            registry.set_delete(t, False)

    # functools.wraps preserves func's __name__/__doc__/__wrapped__ so the
    # decorated command method keeps its metadata (original code lost it).
    @wraps(func)
    def wrapper(
        self: RequirementCommand, options: Values, args: List[Any]
    ) -> Optional[int]:
        assert self.tempdir_registry is not None
        if options.no_clean:
            configure_tempdir_registry(self.tempdir_registry)

        try:
            return func(self, options, args)
        except PreviousBuildDirError:
            # This kind of conflict can occur when the user passes an explicit
            # build directory with a pre-existing folder. In that case we do
            # not want to accidentally remove it.
            configure_tempdir_registry(self.tempdir_registry)
            raise

    return wrapper
|
||||
|
||||
|
||||
class RequirementCommand(IndexGroupCommand):
    """Base class for commands that resolve and fetch requirements.

    Adds the ``--no-clean`` option and provides factory helpers for the
    RequirementPreparer, resolver, and PackageFinder machinery shared by
    requirement-handling commands.
    """

    def __init__(self, *args: Any, **kw: Any) -> None:
        super().__init__(*args, **kw)

        self.cmd_opts.add_option(cmdoptions.no_clean())

    @staticmethod
    def determine_resolver_variant(options: Values) -> str:
        """Determines which resolver should be used, based on the given options."""
        if "legacy-resolver" in options.deprecated_features_enabled:
            return "legacy"

        return "resolvelib"

    @classmethod
    def make_requirement_preparer(
        cls,
        temp_build_dir: TempDirectory,
        options: Values,
        build_tracker: BuildTracker,
        session: PipSession,
        finder: PackageFinder,
        use_user_site: bool,
        download_dir: Optional[str] = None,
        verbosity: int = 0,
    ) -> RequirementPreparer:
        """
        Create a RequirementPreparer instance for the given parameters.
        """
        temp_build_dir_path = temp_build_dir.path
        assert temp_build_dir_path is not None
        legacy_resolver = False

        resolver_variant = cls.determine_resolver_variant(options)
        if resolver_variant == "resolvelib":
            # "fast-deps" (lazy wheels) is only honored by the new resolver.
            lazy_wheel = "fast-deps" in options.features_enabled
            if lazy_wheel:
                logger.warning(
                    "pip is using lazily downloaded wheels using HTTP "
                    "range requests to obtain dependency information. "
                    "This experimental feature is enabled through "
                    "--use-feature=fast-deps and it is not ready for "
                    "production."
                )
        else:
            legacy_resolver = True
            lazy_wheel = False
            if "fast-deps" in options.features_enabled:
                logger.warning(
                    "fast-deps has no effect when used with the legacy resolver."
                )

        return RequirementPreparer(
            build_dir=temp_build_dir_path,
            src_dir=options.src_dir,
            download_dir=download_dir,
            build_isolation=options.build_isolation,
            check_build_deps=options.check_build_deps,
            build_tracker=build_tracker,
            session=session,
            progress_bar=options.progress_bar,
            finder=finder,
            require_hashes=options.require_hashes,
            use_user_site=use_user_site,
            lazy_wheel=lazy_wheel,
            verbosity=verbosity,
            legacy_resolver=legacy_resolver,
        )

    @classmethod
    def make_resolver(
        cls,
        preparer: RequirementPreparer,
        finder: PackageFinder,
        options: Values,
        wheel_cache: Optional[WheelCache] = None,
        use_user_site: bool = False,
        ignore_installed: bool = True,
        ignore_requires_python: bool = False,
        force_reinstall: bool = False,
        upgrade_strategy: str = "to-satisfy-only",
        use_pep517: Optional[bool] = None,
        py_version_info: Optional[Tuple[int, ...]] = None,
    ) -> BaseResolver:
        """
        Create a Resolver instance for the given parameters.
        """
        make_install_req = partial(
            install_req_from_req_string,
            isolated=options.isolated_mode,
            use_pep517=use_pep517,
        )
        resolver_variant = cls.determine_resolver_variant(options)
        # The long import name and duplicated invocation is needed to convince
        # Mypy into correctly typechecking. Otherwise it would complain the
        # "Resolver" class being redefined.
        if resolver_variant == "resolvelib":
            import pip._internal.resolution.resolvelib.resolver

            return pip._internal.resolution.resolvelib.resolver.Resolver(
                preparer=preparer,
                finder=finder,
                wheel_cache=wheel_cache,
                make_install_req=make_install_req,
                use_user_site=use_user_site,
                ignore_dependencies=options.ignore_dependencies,
                ignore_installed=ignore_installed,
                ignore_requires_python=ignore_requires_python,
                force_reinstall=force_reinstall,
                upgrade_strategy=upgrade_strategy,
                py_version_info=py_version_info,
            )
        import pip._internal.resolution.legacy.resolver

        return pip._internal.resolution.legacy.resolver.Resolver(
            preparer=preparer,
            finder=finder,
            wheel_cache=wheel_cache,
            make_install_req=make_install_req,
            use_user_site=use_user_site,
            ignore_dependencies=options.ignore_dependencies,
            ignore_installed=ignore_installed,
            ignore_requires_python=ignore_requires_python,
            force_reinstall=force_reinstall,
            upgrade_strategy=upgrade_strategy,
            py_version_info=py_version_info,
        )

    def get_requirements(
        self,
        args: List[str],
        options: Values,
        finder: PackageFinder,
        session: PipSession,
    ) -> List[InstallRequirement]:
        """
        Parse command-line arguments into the corresponding requirements.

        Collects, in order: constraints files (-c), positional requirement
        specifiers, editables (-e), and requirements files (-r). Raises
        CommandError if nothing was requested at all.
        """
        requirements: List[InstallRequirement] = []
        # Constraints files: these limit versions but are not user-requested
        # installs (user_supplied=False).
        for filename in options.constraints:
            for parsed_req in parse_requirements(
                filename,
                constraint=True,
                finder=finder,
                options=options,
                session=session,
            ):
                req_to_add = install_req_from_parsed_requirement(
                    parsed_req,
                    isolated=options.isolated_mode,
                    user_supplied=False,
                )
                requirements.append(req_to_add)

        # Positional requirement specifiers from the command line.
        for req in args:
            req_to_add = install_req_from_line(
                req,
                comes_from=None,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                user_supplied=True,
                config_settings=getattr(options, "config_settings", None),
            )
            requirements.append(req_to_add)

        for req in options.editables:
            req_to_add = install_req_from_editable(
                req,
                user_supplied=True,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                config_settings=getattr(options, "config_settings", None),
            )
            requirements.append(req_to_add)

        # NOTE: options.require_hashes may be set if --require-hashes is True
        for filename in options.requirements:
            for parsed_req in parse_requirements(
                filename, finder=finder, options=options, session=session
            ):
                req_to_add = install_req_from_parsed_requirement(
                    parsed_req,
                    isolated=options.isolated_mode,
                    use_pep517=options.use_pep517,
                    user_supplied=True,
                    config_settings=(
                        parsed_req.options.get("config_settings")
                        if parsed_req.options
                        else None
                    ),
                )
                requirements.append(req_to_add)

        # If any requirement has hash options, enable hash checking.
        if any(req.has_hash_options for req in requirements):
            options.require_hashes = True

        if not (args or options.editables or options.requirements):
            opts = {"name": self.name}
            if options.find_links:
                raise CommandError(
                    "You must give at least one requirement to {name} "
                    '(maybe you meant "pip {name} {links}"?)'.format(
                        **dict(opts, links=" ".join(options.find_links))
                    )
                )
            else:
                raise CommandError(
                    "You must give at least one requirement to {name} "
                    '(see "pip help {name}")'.format(**opts)
                )

        return requirements

    @staticmethod
    def trace_basic_info(finder: PackageFinder) -> None:
        """
        Trace basic information about the provided objects.
        """
        # Display where finder is looking for packages
        search_scope = finder.search_scope
        locations = search_scope.get_formatted_locations()
        if locations:
            logger.info(locations)

    def _build_package_finder(
        self,
        options: Values,
        session: PipSession,
        target_python: Optional[TargetPython] = None,
        ignore_requires_python: Optional[bool] = None,
    ) -> PackageFinder:
        """
        Create a package finder appropriate to this requirement command.

        :param ignore_requires_python: Whether to ignore incompatible
            "Requires-Python" values in links. Defaults to False.
        """
        link_collector = LinkCollector.create(session, options=options)
        selection_prefs = SelectionPreferences(
            allow_yanked=True,
            format_control=options.format_control,
            allow_all_prereleases=options.pre,
            prefer_binary=options.prefer_binary,
            ignore_requires_python=ignore_requires_python,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
            target_python=target_python,
        )
|
159
Dependencies/Python/Lib/site-packages/pip/_internal/cli/spinners.py
vendored
Normal file
159
Dependencies/Python/Lib/site-packages/pip/_internal/cli/spinners.py
vendored
Normal file
@ -0,0 +1,159 @@
|
||||
import contextlib
|
||||
import itertools
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
from typing import IO, Generator, Optional
|
||||
|
||||
from pip._internal.utils.compat import WINDOWS
|
||||
from pip._internal.utils.logging import get_indentation
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SpinnerInterface:
    """Abstract interface shared by the interactive and logging spinners."""

    def spin(self) -> None:
        """Advance the spinner by one tick; subclasses must override."""
        raise NotImplementedError()

    def finish(self, final_status: str) -> None:
        """Stop the spinner and report *final_status*; subclasses must override."""
        raise NotImplementedError()
|
||||
|
||||
|
||||
class InteractiveSpinner(SpinnerInterface):
    """Spinner for real terminals: animates in place using backspaces."""

    def __init__(
        self,
        message: str,
        file: Optional[IO[str]] = None,
        spin_chars: str = "-\\|/",
        # Empirically, 8 updates/second looks nice
        min_update_interval_seconds: float = 0.125,
    ):
        self._message = message
        self._file = sys.stdout if file is None else file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False
        self._spin_cycle = itertools.cycle(spin_chars)

        self._file.write(" " * get_indentation() + self._message + " ... ")
        # Width of the most recently written status; used to erase it later.
        self._width = 0

    def _write(self, status: str) -> None:
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        erase = "\b" * self._width
        self._file.write(erase + " " * self._width + erase)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self) -> None:
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status: str) -> None:
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True
|
||||
|
||||
|
||||
# Used for dumb terminals, non-interactive installs (no tty), etc.
|
||||
# We still print updates occasionally (once every 60 seconds by default) to
|
||||
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
|
||||
# an indication that a task has frozen.
|
||||
class NonInteractiveSpinner(SpinnerInterface):
    """Spinner that reports status through the logging system instead of a tty."""

    def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status: str) -> None:
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self) -> None:
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        # Periodic keep-alive so CI systems don't treat us as frozen.
        self._update("still running...")

    def finish(self, final_status: str) -> None:
        if self._finished:
            return
        self._update(f"finished with status '{final_status}'")
        self._finished = True
|
||||
|
||||
|
||||
class RateLimiter:
    """Tracks whether enough wall-clock time has passed since the last reset."""

    def __init__(self, min_update_interval_seconds: float) -> None:
        self._min_update_interval_seconds = min_update_interval_seconds
        # 0 means "never updated", so the first ready() check always passes.
        self._last_update: float = 0

    def ready(self) -> bool:
        """Return True once the minimum interval has elapsed since reset()."""
        elapsed = time.time() - self._last_update
        return elapsed >= self._min_update_interval_seconds

    def reset(self) -> None:
        """Record the current time as the most recent update."""
        self._last_update = time.time()
|
||||
|
||||
|
||||
@contextlib.contextmanager
def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
    """Yield a spinner suited to the terminal, finishing it on exit.

    The final status reflects how the block exited: "done", "canceled"
    (KeyboardInterrupt), or "error" (any other exception).
    """
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    spinner: SpinnerInterface
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
|
||||
|
||||
|
||||
# ANSI escape sequences to hide/show the terminal cursor.
HIDE_CURSOR = "\x1b[?25l"
SHOW_CURSOR = "\x1b[?25h"
|
||||
|
||||
|
||||
@contextlib.contextmanager
def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
    """Hide the terminal cursor on *file* for the duration of the block."""
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    # We also don't want to clutter the output with control characters if
    # we're writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    if WINDOWS or not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
        return
    file.write(HIDE_CURSOR)
    try:
        yield
    finally:
        # Always restore the cursor, even if the body raised.
        file.write(SHOW_CURSOR)
|
6
Dependencies/Python/Lib/site-packages/pip/_internal/cli/status_codes.py
vendored
Normal file
6
Dependencies/Python/Lib/site-packages/pip/_internal/cli/status_codes.py
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
SUCCESS = 0
|
||||
ERROR = 1
|
||||
UNKNOWN_ERROR = 2
|
||||
VIRTUALENV_NOT_FOUND = 3
|
||||
PREVIOUS_BUILD_DIR_ERROR = 4
|
||||
NO_MATCHES_FOUND = 23
|
132
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__init__.py
vendored
Normal file
132
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__init__.py
vendored
Normal file
@ -0,0 +1,132 @@
|
||||
"""
|
||||
Package containing all pip commands
|
||||
"""
|
||||
|
||||
import importlib
|
||||
from collections import namedtuple
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from pip._internal.cli.base_command import Command
|
||||
|
||||
# Lazy registration record: where to import a command from, plus its
# one-line summary for `pip --help`.
CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")

# This dictionary does a bunch of heavy lifting for help output:
# - Enables avoiding additional (costly) imports for presenting `--help`.
# - The ordering matters for help display.
#
# Even though the module path starts with the same "pip._internal.commands"
# prefix, the full path makes testing easier (specifically when modifying
# `commands_dict` in test setup / teardown).
commands_dict: Dict[str, CommandInfo] = {
    "install": CommandInfo(
        "pip._internal.commands.install",
        "InstallCommand",
        "Install packages.",
    ),
    "download": CommandInfo(
        "pip._internal.commands.download",
        "DownloadCommand",
        "Download packages.",
    ),
    "uninstall": CommandInfo(
        "pip._internal.commands.uninstall",
        "UninstallCommand",
        "Uninstall packages.",
    ),
    "freeze": CommandInfo(
        "pip._internal.commands.freeze",
        "FreezeCommand",
        "Output installed packages in requirements format.",
    ),
    "inspect": CommandInfo(
        "pip._internal.commands.inspect",
        "InspectCommand",
        "Inspect the python environment.",
    ),
    "list": CommandInfo(
        "pip._internal.commands.list",
        "ListCommand",
        "List installed packages.",
    ),
    "show": CommandInfo(
        "pip._internal.commands.show",
        "ShowCommand",
        "Show information about installed packages.",
    ),
    "check": CommandInfo(
        "pip._internal.commands.check",
        "CheckCommand",
        "Verify installed packages have compatible dependencies.",
    ),
    "config": CommandInfo(
        "pip._internal.commands.configuration",
        "ConfigurationCommand",
        "Manage local and global configuration.",
    ),
    "search": CommandInfo(
        "pip._internal.commands.search",
        "SearchCommand",
        "Search PyPI for packages.",
    ),
    "cache": CommandInfo(
        "pip._internal.commands.cache",
        "CacheCommand",
        "Inspect and manage pip's wheel cache.",
    ),
    "index": CommandInfo(
        "pip._internal.commands.index",
        "IndexCommand",
        "Inspect information available from package indexes.",
    ),
    "wheel": CommandInfo(
        "pip._internal.commands.wheel",
        "WheelCommand",
        "Build wheels from your requirements.",
    ),
    "hash": CommandInfo(
        "pip._internal.commands.hash",
        "HashCommand",
        "Compute hashes of package archives.",
    ),
    "completion": CommandInfo(
        "pip._internal.commands.completion",
        "CompletionCommand",
        "A helper command used for command completion.",
    ),
    "debug": CommandInfo(
        "pip._internal.commands.debug",
        "DebugCommand",
        "Show information useful for debugging.",
    ),
    "help": CommandInfo(
        "pip._internal.commands.help",
        "HelpCommand",
        "Show help for commands.",
    ),
}
|
||||
|
||||
|
||||
def create_command(name: str, **kwargs: Any) -> Command:
    """
    Create an instance of the Command class with the given name.
    """
    # Import lazily so `--help` doesn't pay for every command's imports.
    info = commands_dict[name]
    module = importlib.import_module(info.module_path)
    command_class = getattr(module, info.class_name)
    return command_class(name=name, summary=info.summary, **kwargs)
|
||||
|
||||
|
||||
def get_similar_commands(name: str) -> Optional[str]:
    """Command name auto-correct: best fuzzy match, or None if nothing close."""
    from difflib import get_close_matches

    matches = get_close_matches(name.lower(), commands_dict.keys())
    return matches[0] if matches else None
|
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/cache.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/cache.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/check.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/check.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/completion.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/completion.cpython-313.pyc
vendored
Normal file
Binary file not shown.
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/debug.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/debug.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/download.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/download.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/hash.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/hash.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/help.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/help.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/index.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/index.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/install.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/install.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/list.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/list.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/search.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/search.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/show.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/show.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-313.pyc
vendored
Normal file
Binary file not shown.
228
Dependencies/Python/Lib/site-packages/pip/_internal/commands/cache.py
vendored
Normal file
228
Dependencies/Python/Lib/site-packages/pip/_internal/commands/cache.py
vendored
Normal file
@ -0,0 +1,228 @@
|
||||
import os
|
||||
import textwrap
|
||||
from optparse import Values
|
||||
from typing import Any, List
|
||||
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
||||
from pip._internal.exceptions import CommandError, PipError
|
||||
from pip._internal.utils import filesystem
|
||||
from pip._internal.utils.logging import getLogger
|
||||
from pip._internal.utils.misc import format_size
|
||||
|
||||
logger = getLogger(__name__)
|
||||
|
||||
|
||||
class CacheCommand(Command):
    """
    Inspect and manage pip's wheel cache.

    Subcommands:

    - dir: Show the cache directory.
    - info: Show information about the cache.
    - list: List filenames of packages stored in the cache.
    - remove: Remove one or more package from the cache.
    - purge: Remove all items from the cache.

    ``<pattern>`` can be a glob expression or a package name.
    """

    # Cache inspection never touches an environment, so it is allowed even
    # when PIP_REQUIRE_VIRTUALENV is set.
    ignore_require_venv = True
    usage = """
        %prog dir
        %prog info
        %prog list [<pattern>] [--format=[human, abspath]]
        %prog remove <pattern>
        %prog purge
    """

    def add_options(self) -> None:
        # --format only affects the "list" subcommand's output.
        self.cmd_opts.add_option(
            "--format",
            action="store",
            dest="list_format",
            default="human",
            choices=("human", "abspath"),
            help="Select the output format among: human (default) or abspath",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        # Dispatch table: the first positional argument selects the handler;
        # remaining arguments are passed through to it.
        handlers = {
            "dir": self.get_cache_dir,
            "info": self.get_cache_info,
            "list": self.list_cache_items,
            "remove": self.remove_cache_items,
            "purge": self.purge_cache,
        }

        if not options.cache_dir:
            logger.error("pip cache commands can not function since cache is disabled.")
            return ERROR

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def get_cache_dir(self, options: Values, args: List[Any]) -> None:
        """Print the configured cache directory."""
        if args:
            raise CommandError("Too many arguments")

        logger.info(options.cache_dir)

    def get_cache_info(self, options: Values, args: List[Any]) -> None:
        """Print locations, sizes, and item counts for the HTTP response
        cache and the locally-built-wheel cache."""
        if args:
            raise CommandError("Too many arguments")

        num_http_files = len(self._find_http_files(options))
        num_packages = len(self._find_wheels(options, "*"))

        # pip 23.3 moved the index-page cache from "http" to "http-v2";
        # both locations are reported and sized together.
        http_cache_location = self._cache_dir(options, "http-v2")
        old_http_cache_location = self._cache_dir(options, "http")
        wheels_cache_location = self._cache_dir(options, "wheels")
        http_cache_size = filesystem.format_size(
            filesystem.directory_size(http_cache_location)
            + filesystem.directory_size(old_http_cache_location)
        )
        wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)

        message = (
            textwrap.dedent(
                """
                    Package index page cache location (pip v23.3+): {http_cache_location}
                    Package index page cache location (older pips): {old_http_cache_location}
                    Package index page cache size: {http_cache_size}
                    Number of HTTP files: {num_http_files}
                    Locally built wheels location: {wheels_cache_location}
                    Locally built wheels size: {wheels_cache_size}
                    Number of locally built wheels: {package_count}
                """  # noqa: E501
            )
            .format(
                http_cache_location=http_cache_location,
                old_http_cache_location=old_http_cache_location,
                http_cache_size=http_cache_size,
                num_http_files=num_http_files,
                wheels_cache_location=wheels_cache_location,
                package_count=num_packages,
                wheels_cache_size=wheels_cache_size,
            )
            .strip()
        )

        logger.info(message)

    def list_cache_items(self, options: Values, args: List[Any]) -> None:
        """List cached wheels matching an optional glob pattern ("*" by
        default), in the format selected by --format."""
        if len(args) > 1:
            raise CommandError("Too many arguments")

        if args:
            pattern = args[0]
        else:
            pattern = "*"

        files = self._find_wheels(options, pattern)
        if options.list_format == "human":
            self.format_for_human(files)
        else:
            self.format_for_abspath(files)

    def format_for_human(self, files: List[str]) -> None:
        """Log a sorted, human-readable " - name (size)" listing."""
        if not files:
            logger.info("No locally built wheels cached.")
            return

        results = []
        for filename in files:
            wheel = os.path.basename(filename)
            size = filesystem.format_file_size(filename)
            results.append(f" - {wheel} ({size})")
        logger.info("Cache contents:\n")
        logger.info("\n".join(sorted(results)))

    def format_for_abspath(self, files: List[str]) -> None:
        """Log one absolute path per line; nothing when there are no files."""
        if files:
            logger.info("\n".join(sorted(files)))

    def remove_cache_items(self, options: Values, args: List[Any]) -> None:
        """Delete cached files matching the required pattern and report the
        number of files and bytes removed."""
        if len(args) > 1:
            raise CommandError("Too many arguments")

        if not args:
            raise CommandError("Please provide a pattern")

        files = self._find_wheels(options, args[0])

        no_matching_msg = "No matching packages"
        if args[0] == "*":
            # Only fetch http files if no specific pattern given
            files += self._find_http_files(options)
        else:
            # Add the pattern to the log message
            no_matching_msg += f' for pattern "{args[0]}"'

        if not files:
            logger.warning(no_matching_msg)

        bytes_removed = 0
        for filename in files:
            bytes_removed += os.stat(filename).st_size
            os.unlink(filename)
            logger.verbose("Removed %s", filename)
        logger.info("Files removed: %s (%s)", len(files), format_size(bytes_removed))

    def purge_cache(self, options: Values, args: List[Any]) -> None:
        """Remove everything from the cache; equivalent to ``remove '*'``."""
        if args:
            raise CommandError("Too many arguments")

        return self.remove_cache_items(options, ["*"])

    def _cache_dir(self, options: Values, subdir: str) -> str:
        # Join a cache subdirectory onto the configured cache root.
        return os.path.join(options.cache_dir, subdir)

    def _find_http_files(self, options: Values) -> List[str]:
        # Collect files from both the pre-23.3 "http" layout and the
        # current "http-v2" layout.
        old_http_dir = self._cache_dir(options, "http")
        new_http_dir = self._cache_dir(options, "http-v2")
        return filesystem.find_files(old_http_dir, "*") + filesystem.find_files(
            new_http_dir, "*"
        )

    def _find_wheels(self, options: Values, pattern: str) -> List[str]:
        """Return cached wheel paths whose filename matches *pattern*."""
        wheel_dir = self._cache_dir(options, "wheels")

        # The wheel filename format, as specified in PEP 427, is:
        #     {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
        #
        # Additionally, non-alphanumeric values in the distribution are
        # normalized to underscores (_), meaning hyphens can never occur
        # before `-{version}`.
        #
        # Given that information:
        # - If the pattern we're given contains a hyphen (-), the user is
        #   providing at least the version. Thus, we can just append `*.whl`
        #   to match the rest of it.
        # - If the pattern we're given doesn't contain a hyphen (-), the
        #   user is only providing the name. Thus, we append `-*.whl` to
        #   match the hyphen before the version, followed by anything else.
        #
        # PEP 427: https://www.python.org/dev/peps/pep-0427/
        pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")

        return filesystem.find_files(wheel_dir, pattern)
|
67
Dependencies/Python/Lib/site-packages/pip/_internal/commands/check.py
vendored
Normal file
67
Dependencies/Python/Lib/site-packages/pip/_internal/commands/check.py
vendored
Normal file
@ -0,0 +1,67 @@
|
||||
import logging
|
||||
from optparse import Values
|
||||
from typing import List
|
||||
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
||||
from pip._internal.metadata import get_default_environment
|
||||
from pip._internal.operations.check import (
|
||||
check_package_set,
|
||||
check_unsupported,
|
||||
create_package_set_from_installed,
|
||||
)
|
||||
from pip._internal.utils.compatibility_tags import get_supported
|
||||
from pip._internal.utils.misc import write_output
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""

    # Read-only inspection; safe to run outside a virtualenv even when
    # PIP_REQUIRE_VIRTUALENV is set.
    ignore_require_venv = True
    usage = """
      %prog [options]"""

    def run(self, options: Values, args: List[str]) -> int:
        # Snapshot the installed packages, then look for three problem
        # classes: missing requirements, version conflicts, and
        # distributions not supported on this platform.
        package_set, parsing_probs = create_package_set_from_installed()
        missing, conflicting = check_package_set(package_set)
        unsupported = list(
            check_unsupported(
                get_default_environment().iter_installed_distributions(),
                get_supported(),
            )
        )

        # Report each missing requirement (dependency[0] is the project name).
        for project_name in missing:
            version = package_set[project_name].version
            for dependency in missing[project_name]:
                write_output(
                    "%s %s requires %s, which is not installed.",
                    project_name,
                    version,
                    dependency[0],
                )

        # Report each requirement satisfied by an incompatible version.
        for project_name in conflicting:
            version = package_set[project_name].version
            for dep_name, dep_version, req in conflicting[project_name]:
                write_output(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name,
                    version,
                    req,
                    dep_name,
                    dep_version,
                )
        for package in unsupported:
            write_output(
                "%s %s is not supported on this platform",
                package.raw_name,
                package.version,
            )
        # Metadata parsing problems also count as failure, even though
        # they are not individually reported here.
        if missing or conflicting or parsing_probs or unsupported:
            return ERROR
        else:
            write_output("No broken requirements found.")
            return SUCCESS
|
130
Dependencies/Python/Lib/site-packages/pip/_internal/commands/completion.py
vendored
Normal file
130
Dependencies/Python/Lib/site-packages/pip/_internal/commands/completion.py
vendored
Normal file
@ -0,0 +1,130 @@
|
||||
import sys
|
||||
import textwrap
|
||||
from optparse import Values
|
||||
from typing import List
|
||||
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.status_codes import SUCCESS
|
||||
from pip._internal.utils.misc import get_prog
|
||||
|
||||
BASE_COMPLETION = """
|
||||
# pip {shell} completion start{script}# pip {shell} completion end
|
||||
"""
|
||||
|
||||
COMPLETION_SCRIPTS = {
|
||||
"bash": """
|
||||
_pip_completion()
|
||||
{{
|
||||
COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
|
||||
COMP_CWORD=$COMP_CWORD \\
|
||||
PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
|
||||
}}
|
||||
complete -o default -F _pip_completion {prog}
|
||||
""",
|
||||
"zsh": """
|
||||
#compdef -P pip[0-9.]#
|
||||
__pip() {{
|
||||
compadd $( COMP_WORDS="$words[*]" \\
|
||||
COMP_CWORD=$((CURRENT-1)) \\
|
||||
PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
|
||||
}}
|
||||
if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
|
||||
# autoload from fpath, call function directly
|
||||
__pip "$@"
|
||||
else
|
||||
# eval/source/. command, register function for later
|
||||
compdef __pip -P 'pip[0-9.]#'
|
||||
fi
|
||||
""",
|
||||
"fish": """
|
||||
function __fish_complete_pip
|
||||
set -lx COMP_WORDS (commandline -o) ""
|
||||
set -lx COMP_CWORD ( \\
|
||||
math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
|
||||
)
|
||||
set -lx PIP_AUTO_COMPLETE 1
|
||||
string split \\ -- (eval $COMP_WORDS[1])
|
||||
end
|
||||
complete -fa "(__fish_complete_pip)" -c {prog}
|
||||
""",
|
||||
"powershell": """
|
||||
if ((Test-Path Function:\\TabExpansion) -and -not `
|
||||
(Test-Path Function:\\_pip_completeBackup)) {{
|
||||
Rename-Item Function:\\TabExpansion _pip_completeBackup
|
||||
}}
|
||||
function TabExpansion($line, $lastWord) {{
|
||||
$lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()
|
||||
if ($lastBlock.StartsWith("{prog} ")) {{
|
||||
$Env:COMP_WORDS=$lastBlock
|
||||
$Env:COMP_CWORD=$lastBlock.Split().Length - 1
|
||||
$Env:PIP_AUTO_COMPLETE=1
|
||||
(& {prog}).Split()
|
||||
Remove-Item Env:COMP_WORDS
|
||||
Remove-Item Env:COMP_CWORD
|
||||
Remove-Item Env:PIP_AUTO_COMPLETE
|
||||
}}
|
||||
elseif (Test-Path Function:\\_pip_completeBackup) {{
|
||||
# Fall back on existing tab expansion
|
||||
_pip_completeBackup $line $lastWord
|
||||
}}
|
||||
}}
|
||||
""",
|
||||
}
|
||||
|
||||
|
||||
class CompletionCommand(Command):
    """A helper command to be used for command completion."""

    # Completion scripts are environment-independent.
    ignore_require_venv = True

    def add_options(self) -> None:
        # One store_const flag per supported shell; all write to the same
        # "shell" destination, so the last flag on the command line wins.
        self.cmd_opts.add_option(
            "--bash",
            "-b",
            action="store_const",
            const="bash",
            dest="shell",
            help="Emit completion code for bash",
        )
        self.cmd_opts.add_option(
            "--zsh",
            "-z",
            action="store_const",
            const="zsh",
            dest="shell",
            help="Emit completion code for zsh",
        )
        self.cmd_opts.add_option(
            "--fish",
            "-f",
            action="store_const",
            const="fish",
            dest="shell",
            help="Emit completion code for fish",
        )
        self.cmd_opts.add_option(
            "--powershell",
            "-p",
            action="store_const",
            const="powershell",
            dest="shell",
            help="Emit completion code for powershell",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ["--" + shell for shell in sorted(shells)]
        if options.shell in shells:
            # Substitute the actual program name (e.g. "pip3") into the
            # shell-specific template, then wrap it in the start/end markers.
            script = textwrap.dedent(
                COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())
            )
            print(BASE_COMPLETION.format(script=script, shell=options.shell))
            return SUCCESS
        else:
            # No (valid) shell flag given: tell the user which flags exist.
            # NOTE(review): returns SUCCESS even on this error path —
            # preserved as-is since callers may rely on the exit code.
            sys.stderr.write(
                "ERROR: You must pass {}\n".format(" or ".join(shell_options))
            )
            return SUCCESS
|
280
Dependencies/Python/Lib/site-packages/pip/_internal/commands/configuration.py
vendored
Normal file
280
Dependencies/Python/Lib/site-packages/pip/_internal/commands/configuration.py
vendored
Normal file
@ -0,0 +1,280 @@
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
from optparse import Values
|
||||
from typing import Any, List, Optional
|
||||
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
||||
from pip._internal.configuration import (
|
||||
Configuration,
|
||||
Kind,
|
||||
get_configuration_files,
|
||||
kinds,
|
||||
)
|
||||
from pip._internal.exceptions import PipError
|
||||
from pip._internal.utils.logging import indent_log
|
||||
from pip._internal.utils.misc import get_prog, write_output
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ConfigurationCommand(Command):
    """
    Manage local and global configuration.

    Subcommands:

    - list: List the active configuration (or from the file specified)
    - edit: Edit the configuration file in an editor
    - get: Get the value associated with command.option
    - set: Set the command.option=value
    - unset: Unset the value associated with command.option
    - debug: List the configuration files and values defined under them

    Configuration keys should be dot separated command and option name,
    with the special prefix "global" affecting any command. For example,
    "pip config set global.index-url https://example.org/" would configure
    the index url for all commands, but "pip config set download.timeout 10"
    would configure a 10 second timeout only for "pip download" commands.

    If none of --user, --global and --site are passed, a virtual
    environment configuration file is used if one is active and the file
    exists. Otherwise, all modifications happen to the user file by
    default.
    """

    ignore_require_venv = True
    usage = """
        %prog [<file-option>] list
        %prog [<file-option>] [--editor <editor-path>] edit

        %prog [<file-option>] get command.option
        %prog [<file-option>] set command.option value
        %prog [<file-option>] unset command.option
        %prog [<file-option>] debug
    """

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "--editor",
            dest="editor",
            action="store",
            default=None,
            help=(
                "Editor to use to edit the file. Uses VISUAL or EDITOR "
                "environment variables if not provided."
            ),
        )

        # --global / --user / --site are mutually exclusive file selectors;
        # exclusivity is enforced later in _determine_file().
        self.cmd_opts.add_option(
            "--global",
            dest="global_file",
            action="store_true",
            default=False,
            help="Use the system-wide configuration file only",
        )

        self.cmd_opts.add_option(
            "--user",
            dest="user_file",
            action="store_true",
            default=False,
            help="Use the user configuration file only",
        )

        self.cmd_opts.add_option(
            "--site",
            dest="site_file",
            action="store_true",
            default=False,
            help="Use the current environment configuration file only",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        # Dispatch table: the first positional argument selects the handler.
        handlers = {
            "list": self.list_values,
            "edit": self.open_in_editor,
            "get": self.get_name,
            "set": self.set_name_value,
            "unset": self.unset_name,
            "debug": self.list_config_values,
        }

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Determine which configuration files are to be loaded
        # Depends on whether the command is modifying.
        try:
            load_only = self._determine_file(
                options, need_value=(action in ["get", "set", "unset", "edit"])
            )
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        # Load a new configuration
        self.configuration = Configuration(
            isolated=options.isolated_mode, load_only=load_only
        )
        self.configuration.load()

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
        """Resolve which single configuration file variant to operate on.

        Returns None when no selector was given and none is needed;
        raises PipError when more than one selector flag was passed.
        """
        file_options = [
            key
            for key, value in (
                (kinds.USER, options.user_file),
                (kinds.GLOBAL, options.global_file),
                (kinds.SITE, options.site_file),
            )
            if value
        ]

        if not file_options:
            if not need_value:
                return None
            # Default to user, unless there's a site file.
            elif any(
                os.path.exists(site_config_file)
                for site_config_file in get_configuration_files()[kinds.SITE]
            ):
                return kinds.SITE
            else:
                return kinds.USER
        elif len(file_options) == 1:
            return file_options[0]

        raise PipError(
            "Need exactly one file to operate upon "
            "(--user, --site, --global) to perform."
        )

    def list_values(self, options: Values, args: List[str]) -> None:
        """Print every active key=value pair."""
        self._get_n_args(args, "list", n=0)

        for key, value in sorted(self.configuration.items()):
            write_output("%s=%r", key, value)

    def get_name(self, options: Values, args: List[str]) -> None:
        """Print the value stored under a single key."""
        key = self._get_n_args(args, "get [name]", n=1)
        value = self.configuration.get_value(key)

        write_output("%s", value)

    def set_name_value(self, options: Values, args: List[str]) -> None:
        """Store key=value and persist the configuration."""
        key, value = self._get_n_args(args, "set [name] [value]", n=2)
        self.configuration.set_value(key, value)

        self._save_configuration()

    def unset_name(self, options: Values, args: List[str]) -> None:
        """Remove a key and persist the configuration."""
        key = self._get_n_args(args, "unset [name]", n=1)
        self.configuration.unset_value(key)

        self._save_configuration()

    def list_config_values(self, options: Values, args: List[str]) -> None:
        """List config key-value pairs across different config files"""
        self._get_n_args(args, "debug", n=0)

        self.print_env_var_values()
        # Iterate over config files and print if they exist, and the
        # key-value pairs present in them if they do
        for variant, files in sorted(self.configuration.iter_config_files()):
            write_output("%s:", variant)
            for fname in files:
                with indent_log():
                    file_exists = os.path.exists(fname)
                    write_output("%s, exists: %r", fname, file_exists)
                    if file_exists:
                        self.print_config_file_values(variant)

    def print_config_file_values(self, variant: Kind) -> None:
        """Get key-value pairs from the file of a variant"""
        for name, value in self.configuration.get_values_in_config(variant).items():
            with indent_log():
                write_output("%s: %s", name, value)

    def print_env_var_values(self) -> None:
        """Get key-values pairs present as environment variables"""
        write_output("%s:", "env_var")
        with indent_log():
            for key, value in sorted(self.configuration.get_environ_vars()):
                env_var = f"PIP_{key.upper()}"
                write_output("%s=%r", env_var, value)

    def open_in_editor(self, options: Values, args: List[str]) -> None:
        """Launch the user's editor on the selected configuration file."""
        editor = self._determine_editor(options)

        fname = self.configuration.get_file_to_edit()
        if fname is None:
            raise PipError("Could not determine appropriate file.")
        elif '"' in fname:
            # This shouldn't happen, unless we see a username like that.
            # If that happens, we'd appreciate a pull request fixing this.
            raise PipError(
                f'Can not open an editor for a file name containing "\n{fname}'
            )

        try:
            subprocess.check_call(f'{editor} "{fname}"', shell=True)
        except FileNotFoundError as e:
            # Attribute the failure to the editor binary in the error message.
            if not e.filename:
                e.filename = editor
            raise
        except subprocess.CalledProcessError as e:
            raise PipError(f"Editor Subprocess exited with exit code {e.returncode}")

    def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
        """Helper to make sure the command got the right number of arguments"""
        if len(args) != n:
            msg = (
                f"Got unexpected number of arguments, expected {n}. "
                f'(example: "{get_prog()} config {example}")'
            )
            raise PipError(msg)

        # Single argument is returned bare; multiple as a list.
        if n == 1:
            return args[0]
        else:
            return args

    def _save_configuration(self) -> None:
        # We successfully ran a modifying command. Need to save the
        # configuration.
        try:
            self.configuration.save()
        except Exception:
            logger.exception(
                "Unable to save configuration. Please report this as a bug."
            )
            raise PipError("Internal Error.")

    def _determine_editor(self, options: Values) -> str:
        """Pick the editor: --editor flag, then $VISUAL, then $EDITOR."""
        if options.editor is not None:
            return options.editor
        elif "VISUAL" in os.environ:
            return os.environ["VISUAL"]
        elif "EDITOR" in os.environ:
            return os.environ["EDITOR"]
        else:
            raise PipError("Could not determine editor to use.")
|
201
Dependencies/Python/Lib/site-packages/pip/_internal/commands/debug.py
vendored
Normal file
201
Dependencies/Python/Lib/site-packages/pip/_internal/commands/debug.py
vendored
Normal file
@ -0,0 +1,201 @@
|
||||
import locale
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from optparse import Values
|
||||
from types import ModuleType
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import pip._vendor
|
||||
from pip._vendor.certifi import where
|
||||
from pip._vendor.packaging.version import parse as parse_version
|
||||
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.cmdoptions import make_target_python
|
||||
from pip._internal.cli.status_codes import SUCCESS
|
||||
from pip._internal.configuration import Configuration
|
||||
from pip._internal.metadata import get_environment
|
||||
from pip._internal.utils.compat import open_text_resource
|
||||
from pip._internal.utils.logging import indent_log
|
||||
from pip._internal.utils.misc import get_pip_version
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def show_value(name: str, value: Any) -> None:
    """Log a single "name: value" diagnostic line."""
    logger.info("%s: %s", name, value)
|
||||
|
||||
|
||||
def show_sys_implementation() -> None:
    """Log the interpreter implementation name, indented under a header."""
    logger.info("sys.implementation:")
    with indent_log():
        show_value("name", sys.implementation.name)
|
||||
|
||||
|
||||
def create_vendor_txt_map() -> Dict[str, str]:
    """Parse pip's vendor.txt into a ``{module_name: pinned_version}`` dict.

    Lines without "==" (comments, blanks) are skipped, and anything after
    the first space on a line (e.g. trailing comments) is discarded.
    """
    with open_text_resource("pip._vendor", "vendor.txt") as f:
        pinned = [ln.strip().split(" ", 1)[0] for ln in f.readlines() if "==" in ln]
    return dict(entry.split("==", 1) for entry in pinned)
|
||||
|
||||
|
||||
def get_module_from_module_name(module_name: str) -> Optional[ModuleType]:
    """Import and return the vendored module named in vendor.txt.

    Returns None only for 'truststore' on Python < 3.10, where the import
    is allowed to fail; any other import failure propagates.
    """
    # Module name can be uppercase in vendor.txt for some reason...
    module_name = module_name.lower().replace("-", "_")
    # PATCH: setuptools is actually only pkg_resources.
    if module_name == "setuptools":
        module_name = "pkg_resources"

    try:
        # Import for the side effect of binding the submodule as an
        # attribute on the pip._vendor package, then fetch it from there.
        __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
        return getattr(pip._vendor, module_name)
    except ImportError:
        # We allow 'truststore' to fail to import due
        # to being unavailable on Python 3.9 and earlier.
        if module_name == "truststore" and sys.version_info < (3, 10):
            return None
        raise
|
||||
|
||||
|
||||
def get_vendor_version_from_module(module_name: str) -> Optional[str]:
    """Return the version string of a vendored module, or ``None`` if unknown."""
    module = get_module_from_module_name(module_name)
    version = getattr(module, "__version__", None)

    if module and not version:
        # No __version__ attribute: fall back to distribution metadata
        # located next to the (debundled) module file.
        assert module.__file__ is not None
        env = get_environment([os.path.dirname(module.__file__)])
        dist = env.get_distribution(module_name)
        if dist:
            version = str(dist.version)

    return version
|
||||
|
||||
|
||||
def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
    """Log the actual version and print extra info if there is
    a conflict or if the actual version could not be imported.
    """
    for module_name, expected_version in vendor_txt_versions.items():
        actual_version = get_vendor_version_from_module(module_name)
        extra_message = ""
        if not actual_version:
            actual_version = expected_version
            extra_message = (
                " (Unable to locate actual module version, using"
                " vendor.txt specified version)"
            )
        elif parse_version(actual_version) != parse_version(expected_version):
            extra_message = (
                " (CONFLICT: vendor.txt suggests version should"
                f" be {expected_version})"
            )
        logger.info("%s==%s%s", module_name, actual_version, extra_message)
|
||||
|
||||
|
||||
def show_vendor_versions() -> None:
    """Log each vendored library with its installed vs. pinned version."""
    logger.info("vendored library versions:")

    with indent_log():
        show_actual_vendor_versions(create_vendor_txt_map())
|
||||
|
||||
|
||||
def show_tags(options: Values) -> None:
    """Log the compatible wheel tags, truncated unless --verbose is given."""
    tag_limit = 10

    target_python = make_target_python(options)
    tags = target_python.get_sorted_tags()

    # Display the target options that were explicitly provided.
    formatted_target = target_python.format_given()
    suffix = f" (target: {formatted_target})" if formatted_target else ""

    logger.info(f"Compatible tags: {len(tags)}{suffix}")

    # Without --verbose, cap the listing at tag_limit entries.
    tags_limited = options.verbose < 1 and len(tags) > tag_limit
    if tags_limited:
        tags = tags[:tag_limit]

    with indent_log():
        for tag in tags:
            logger.info(str(tag))

        if tags_limited:
            logger.info(
                f"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
            )
|
||||
|
||||
|
||||
def ca_bundle_info(config: Configuration) -> str:
    """Describe which configuration level(s) are in effect for the debug output.

    Returns "Not specified" when the configuration is empty, "global" when no
    per-command section overrides it, otherwise the remaining level names.
    """
    levels = {key.split(".", 1)[0] for key, _ in config.items()}
    if not levels:
        return "Not specified"

    # These per-command sections take precedence over the global section.
    overriding = [level for level in levels if level in ("install", "wheel", "download")]
    if not overriding:
        return "global"

    levels.discard("global")
    return ", ".join(levels)
|
||||
|
||||
|
||||
class DebugCommand(Command):
    """
    Display debug information.
    """

    usage = """
      %prog <options>"""
    # Pure environment inspection; never require a virtualenv.
    ignore_require_venv = True

    def add_options(self) -> None:
        # Accept the same target-interpreter options as other commands, so
        # tag output can describe an arbitrary target platform.
        cmdoptions.add_target_python_options(self.cmd_opts)
        self.parser.insert_option_group(0, self.cmd_opts)
        # Load configuration eagerly so run() can pass it to ca_bundle_info().
        self.parser.config.load()

    def run(self, options: Values, args: List[str]) -> int:
        # The output format is intentionally unstable; warn against scripting it.
        logger.warning(
            "This command is only meant for debugging. "
            "Do not use this with automation for parsing and getting these "
            "details, since the output and options of this command may "
            "change without notice."
        )
        # Interpreter / platform details.
        show_value("pip version", get_pip_version())
        show_value("sys.version", sys.version)
        show_value("sys.executable", sys.executable)
        show_value("sys.getdefaultencoding", sys.getdefaultencoding())
        show_value("sys.getfilesystemencoding", sys.getfilesystemencoding())
        show_value(
            "locale.getpreferredencoding",
            locale.getpreferredencoding(),
        )
        show_value("sys.platform", sys.platform)
        show_sys_implementation()

        # CA-certificate configuration from pip config and the environment.
        show_value("'cert' config value", ca_bundle_info(self.parser.config))
        show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE"))
        show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE"))
        show_value("pip._vendor.certifi.where()", where())
        show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)

        # Vendored-library versions vs. vendor.txt pins.
        show_vendor_versions()

        # Compatible wheel tags for the (possibly overridden) target.
        show_tags(options)

        return SUCCESS
|
146
Dependencies/Python/Lib/site-packages/pip/_internal/commands/download.py
vendored
Normal file
146
Dependencies/Python/Lib/site-packages/pip/_internal/commands/download.py
vendored
Normal file
@ -0,0 +1,146 @@
|
||||
import logging
|
||||
import os
|
||||
from optparse import Values
|
||||
from typing import List
|
||||
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.cmdoptions import make_target_python
|
||||
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
|
||||
from pip._internal.cli.status_codes import SUCCESS
|
||||
from pip._internal.operations.build.build_tracker import get_build_tracker
|
||||
from pip._internal.req.req_install import check_legacy_setup_py_options
|
||||
from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
|
||||
from pip._internal.utils.temp_dir import TempDirectory
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def add_options(self) -> None:
        # Resolution-affecting options shared with the install command.
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.global_options())
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
        self.cmd_opts.add_option(cmdoptions.prefer_binary())
        self.cmd_opts.add_option(cmdoptions.src())
        self.cmd_opts.add_option(cmdoptions.pre())
        self.cmd_opts.add_option(cmdoptions.require_hashes())
        self.cmd_opts.add_option(cmdoptions.progress_bar())
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.check_build_deps())
        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())

        # Destination directory; defaults to the current working directory.
        self.cmd_opts.add_option(
            "-d",
            "--dest",
            "--destination-dir",
            "--destination-directory",
            dest="download_dir",
            metavar="dir",
            default=os.curdir,
            help="Download packages into <dir>.",
        )

        # Target-interpreter options (delegated to cmdoptions).
        cmdoptions.add_target_python_options(self.cmd_opts)

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:
        """Resolve the requirements and save their archives into --dest."""
        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
        options.editables = []

        cmdoptions.check_dist_restriction(options)

        options.download_dir = normalize_path(options.download_dir)
        ensure_dir(options.download_dir)

        session = self.get_default_session(options)

        target_python = make_target_python(options)
        finder = self._build_package_finder(
            options=options,
            session=session,
            target_python=target_python,
            ignore_requires_python=options.ignore_requires_python,
        )

        build_tracker = self.enter_context(get_build_tracker())

        directory = TempDirectory(
            delete=not options.no_clean,
            kind="download",
            globally_managed=True,
        )

        reqs = self.get_requirements(args, options, finder, session)
        check_legacy_setup_py_options(options, reqs)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            build_tracker=build_tracker,
            session=session,
            finder=finder,
            download_dir=options.download_dir,
            use_user_site=False,
            verbosity=self.verbosity,
        )

        resolver = self.make_resolver(
            preparer=preparer,
            finder=finder,
            options=options,
            ignore_requires_python=options.ignore_requires_python,
            use_pep517=options.use_pep517,
            py_version_info=options.python_version,
        )

        self.trace_basic_info(finder)

        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

        # Save every resolved requirement that is not already satisfied.
        downloaded: List[str] = []
        for req in requirement_set.requirements.values():
            if req.satisfied_by is None:
                assert req.name is not None
                preparer.save_linked_requirement(req)
                downloaded.append(req.name)

        preparer.prepare_linked_requirements_more(requirement_set.requirements.values())

        if downloaded:
            write_output("Successfully downloaded %s", " ".join(downloaded))

        return SUCCESS
|
109
Dependencies/Python/Lib/site-packages/pip/_internal/commands/freeze.py
vendored
Normal file
109
Dependencies/Python/Lib/site-packages/pip/_internal/commands/freeze.py
vendored
Normal file
@ -0,0 +1,109 @@
|
||||
import sys
|
||||
from optparse import Values
|
||||
from typing import AbstractSet, List
|
||||
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.status_codes import SUCCESS
|
||||
from pip._internal.operations.freeze import freeze
|
||||
from pip._internal.utils.compat import stdlib_pkgs
|
||||
|
||||
|
||||
def _should_suppress_build_backends() -> bool:
|
||||
return sys.version_info < (3, 12)
|
||||
|
||||
|
||||
def _dev_pkgs() -> AbstractSet[str]:
    """Packages skipped by default in freeze output (pip itself, plus
    build backends on interpreters where they are suppressed)."""
    suppressed = {"pip"}

    if _should_suppress_build_backends():
        suppressed |= {"setuptools", "distribute", "wheel"}

    return suppressed
|
||||
|
||||
|
||||
class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    """

    ignore_require_venv = True
    usage = """
      %prog [options]"""
    # Route logging to stderr so stdout carries only the requirements
    # lines ("pip freeze > requirements.txt" stays clean).
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-r",
            "--requirement",
            dest="requirements",
            action="append",
            default=[],
            metavar="file",
            help=(
                "Use the order in the given requirements file and its "
                "comments when generating output. This option can be "
                "used multiple times."
            ),
        )
        self.cmd_opts.add_option(
            "-l",
            "--local",
            dest="local",
            action="store_true",
            default=False,
            help=(
                "If in a virtualenv that has global access, do not output "
                "globally-installed packages."
            ),
        )
        self.cmd_opts.add_option(
            "--user",
            dest="user",
            action="store_true",
            default=False,
            help="Only output packages installed in user-site.",
        )
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.cmd_opts.add_option(
            "--all",
            dest="freeze_all",
            action="store_true",
            help=(
                "Do not skip these packages in the output:"
                " {}".format(", ".join(_dev_pkgs()))
            ),
        )
        self.cmd_opts.add_option(
            "--exclude-editable",
            dest="exclude_editable",
            action="store_true",
            help="Exclude editable package from output.",
        )
        self.cmd_opts.add_option(cmdoptions.list_exclude())

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        # Always skip stdlib-shipped distributions; also skip the dev
        # packages (pip etc.) unless --all was given.
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(_dev_pkgs())

        # --exclude entries are added to the skip set as well.
        if options.excludes:
            skip.update(options.excludes)

        # Validate the --path option combination (delegated to cmdoptions).
        cmdoptions.check_list_path_option(options)

        for line in freeze(
            requirement=options.requirements,
            local_only=options.local,
            user_only=options.user,
            paths=options.path,
            isolated=options.isolated_mode,
            skip=skip,
            exclude_editable=options.exclude_editable,
        ):
            sys.stdout.write(line + "\n")
        return SUCCESS
|
59
Dependencies/Python/Lib/site-packages/pip/_internal/commands/hash.py
vendored
Normal file
59
Dependencies/Python/Lib/site-packages/pip/_internal/commands/hash.py
vendored
Normal file
@ -0,0 +1,59 @@
|
||||
import hashlib
|
||||
import logging
|
||||
import sys
|
||||
from optparse import Values
|
||||
from typing import List
|
||||
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
||||
from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
|
||||
from pip._internal.utils.misc import read_chunks, write_output
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.
    """

    usage = "%prog [options] <file> ..."
    ignore_require_venv = True

    def add_options(self) -> None:
        # Single -a/--algorithm option, restricted to hashes pip considers
        # strong enough for requirement pinning.
        self.cmd_opts.add_option(
            "-a",
            "--algorithm",
            dest="algorithm",
            choices=STRONG_HASHES,
            action="store",
            default=FAVORITE_HASH,
            help="The hash algorithm to use: one of {}".format(
                ", ".join(STRONG_HASHES)
            ),
        )
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        # At least one file argument is required.
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        algorithm = options.algorithm
        for path in args:
            digest = _hash_of_file(path, algorithm)
            write_output("%s:\n--hash=%s:%s", path, algorithm, digest)
        return SUCCESS
|
||||
|
||||
|
||||
def _hash_of_file(path: str, algorithm: str) -> str:
    """Return the hash digest of a file."""
    digest = hashlib.new(algorithm)
    with open(path, "rb") as archive:
        # Stream the file in chunks so large archives are not read at once.
        for chunk in read_chunks(archive):
            digest.update(chunk)
    return digest.hexdigest()
|
41
Dependencies/Python/Lib/site-packages/pip/_internal/commands/help.py
vendored
Normal file
41
Dependencies/Python/Lib/site-packages/pip/_internal/commands/help.py
vendored
Normal file
@ -0,0 +1,41 @@
|
||||
from optparse import Values
|
||||
from typing import List
|
||||
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.status_codes import SUCCESS
|
||||
from pip._internal.exceptions import CommandError
|
||||
|
||||
|
||||
class HelpCommand(Command):
    """Show help for commands"""

    usage = """
      %prog <command>"""
    ignore_require_venv = True

    def run(self, options: Values, args: List[str]) -> int:
        # Imported lazily to avoid a circular import with pip._internal.commands.
        from pip._internal.commands import (
            commands_dict,
            create_command,
            get_similar_commands,
        )

        if not args:
            # 'pip help' with no args is handled by pip.__init__.parseopt()
            return SUCCESS
        cmd_name = args[0]  # the command we need help for

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)

            parts = [f'unknown command "{cmd_name}"']
            if guess:
                parts.append(f'maybe you meant "{guess}"')

            raise CommandError(" - ".join(parts))

        create_command(cmd_name).parser.print_help()

        return SUCCESS
|
139
Dependencies/Python/Lib/site-packages/pip/_internal/commands/index.py
vendored
Normal file
139
Dependencies/Python/Lib/site-packages/pip/_internal/commands/index.py
vendored
Normal file
@ -0,0 +1,139 @@
|
||||
import logging
|
||||
from optparse import Values
|
||||
from typing import Any, Iterable, List, Optional
|
||||
|
||||
from pip._vendor.packaging.version import Version
|
||||
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.req_command import IndexGroupCommand
|
||||
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
||||
from pip._internal.commands.search import print_dist_installation_info
|
||||
from pip._internal.exceptions import CommandError, DistributionNotFound, PipError
|
||||
from pip._internal.index.collector import LinkCollector
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
from pip._internal.models.selection_prefs import SelectionPreferences
|
||||
from pip._internal.models.target_python import TargetPython
|
||||
from pip._internal.network.session import PipSession
|
||||
from pip._internal.utils.misc import write_output
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IndexCommand(IndexGroupCommand):
    """
    Inspect information available from package indexes.
    """

    ignore_require_venv = True
    usage = """
        %prog versions <package>
    """

    def add_options(self) -> None:
        # Target-interpreter options (delegated to cmdoptions).
        cmdoptions.add_target_python_options(self.cmd_opts)

        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
        self.cmd_opts.add_option(cmdoptions.pre())
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        # Map of sub-action name -> handler; only "versions" exists today.
        handlers = {
            "versions": self.get_available_package_versions,
        }

        logger.warning(
            "pip index is currently an experimental command. "
            "It may be removed/changed in a future release "
            "without prior warning."
        )

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _build_package_finder(
        self,
        options: Values,
        session: PipSession,
        target_python: Optional[TargetPython] = None,
        ignore_requires_python: Optional[bool] = None,
    ) -> PackageFinder:
        """
        Create a package finder appropriate to the index command.
        """
        link_collector = LinkCollector.create(session, options=options)

        # Pass allow_yanked=False to ignore yanked versions.
        selection_prefs = SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=options.pre,
            ignore_requires_python=ignore_requires_python,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
            target_python=target_python,
        )

    def get_available_package_versions(self, options: Values, args: List[Any]) -> None:
        """Print all index-available versions of one package, newest first.

        Raises CommandError on bad arguments and DistributionNotFound when
        nothing matches.
        """
        if len(args) != 1:
            raise CommandError("You need to specify exactly one argument")

        target_python = cmdoptions.make_target_python(options)
        query = args[0]

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                target_python=target_python,
                ignore_requires_python=options.ignore_requires_python,
            )

            versions: Iterable[Version] = (
                candidate.version for candidate in finder.find_all_candidates(query)
            )

            if not options.pre:
                # Remove prereleases
                versions = (
                    version for version in versions if not version.is_prerelease
                )
            versions = set(versions)

            if not versions:
                raise DistributionNotFound(
                    f"No matching distribution found for {query}"
                )

            formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
            latest = formatted_versions[0]

        write_output(f"{query} ({latest})")
        write_output("Available versions: {}".format(", ".join(formatted_versions)))
        print_dist_installation_info(query, latest)
|
92
Dependencies/Python/Lib/site-packages/pip/_internal/commands/inspect.py
vendored
Normal file
92
Dependencies/Python/Lib/site-packages/pip/_internal/commands/inspect.py
vendored
Normal file
@ -0,0 +1,92 @@
|
||||
import logging
|
||||
from optparse import Values
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from pip._vendor.packaging.markers import default_environment
|
||||
from pip._vendor.rich import print_json
|
||||
|
||||
from pip import __version__
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.status_codes import SUCCESS
|
||||
from pip._internal.metadata import BaseDistribution, get_environment
|
||||
from pip._internal.utils.compat import stdlib_pkgs
|
||||
from pip._internal.utils.urls import path_to_url
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class InspectCommand(Command):
    """
    Inspect the content of a Python environment and produce a report in JSON format.
    """

    ignore_require_venv = True
    usage = """
      %prog [options]"""

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "--local",
            action="store_true",
            default=False,
            help=(
                "If in a virtualenv that has global access, do not list "
                "globally-installed packages."
            ),
        )
        self.cmd_opts.add_option(
            "--user",
            dest="user",
            action="store_true",
            default=False,
            help="Only output packages installed in user-site.",
        )
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        # Validate the --path option combination (delegated to cmdoptions).
        cmdoptions.check_list_path_option(options)
        dists = get_environment(options.path).iter_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            skip=set(stdlib_pkgs),
        )
        # Report schema version "1": one entry per installed distribution,
        # plus the PEP 508 environment markers.
        output = {
            "version": "1",
            "pip_version": __version__,
            "installed": [self._dist_to_dict(dist) for dist in dists],
            "environment": default_environment(),
            # TODO tags? scheme?
        }
        print_json(data=output)
        return SUCCESS

    def _dist_to_dict(self, dist: BaseDistribution) -> Dict[str, Any]:
        """Serialize one installed distribution into the JSON report shape."""
        res: Dict[str, Any] = {
            "metadata": dist.metadata_dict,
            "metadata_location": dist.info_location,
        }
        # direct_url. Note that we don't have download_info (as in the installation
        # report) since it is not recorded in installed metadata.
        direct_url = dist.direct_url
        if direct_url is not None:
            res["direct_url"] = direct_url.to_dict()
        else:
            # Emulate direct_url for legacy editable installs.
            editable_project_location = dist.editable_project_location
            if editable_project_location is not None:
                res["direct_url"] = {
                    "url": path_to_url(editable_project_location),
                    "dir_info": {
                        "editable": True,
                    },
                }
        # installer
        installer = dist.installer
        if dist.installer:
            res["installer"] = installer
        # requested
        if dist.installed_with_dist_info:
            res["requested"] = dist.requested
        return res
|
784
Dependencies/Python/Lib/site-packages/pip/_internal/commands/install.py
vendored
Normal file
784
Dependencies/Python/Lib/site-packages/pip/_internal/commands/install.py
vendored
Normal file
@ -0,0 +1,784 @@
|
||||
import errno
|
||||
import json
|
||||
import operator
|
||||
import os
|
||||
import shutil
|
||||
import site
|
||||
from optparse import SUPPRESS_HELP, Values
|
||||
from typing import List, Optional
|
||||
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
from pip._vendor.rich import print_json
|
||||
|
||||
# Eagerly import self_outdated_check to avoid crashes. Otherwise,
|
||||
# this module would be imported *after* pip was replaced, resulting
|
||||
# in crashes if the new self_outdated_check module was incompatible
|
||||
# with the rest of pip that's already imported, or allowing a
|
||||
# wheel to execute arbitrary code on install by replacing
|
||||
# self_outdated_check.
|
||||
import pip._internal.self_outdated_check # noqa: F401
|
||||
from pip._internal.cache import WheelCache
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.cmdoptions import make_target_python
|
||||
from pip._internal.cli.req_command import (
|
||||
RequirementCommand,
|
||||
with_cleanup,
|
||||
)
|
||||
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
||||
from pip._internal.exceptions import CommandError, InstallationError
|
||||
from pip._internal.locations import get_scheme
|
||||
from pip._internal.metadata import get_environment
|
||||
from pip._internal.models.installation_report import InstallationReport
|
||||
from pip._internal.operations.build.build_tracker import get_build_tracker
|
||||
from pip._internal.operations.check import ConflictDetails, check_install_conflicts
|
||||
from pip._internal.req import install_given_reqs
|
||||
from pip._internal.req.req_install import (
|
||||
InstallRequirement,
|
||||
check_legacy_setup_py_options,
|
||||
)
|
||||
from pip._internal.utils.compat import WINDOWS
|
||||
from pip._internal.utils.filesystem import test_writable_dir
|
||||
from pip._internal.utils.logging import getLogger
|
||||
from pip._internal.utils.misc import (
|
||||
check_externally_managed,
|
||||
ensure_dir,
|
||||
get_pip_version,
|
||||
protect_pip_from_modification_on_windows,
|
||||
warn_if_run_as_root,
|
||||
write_output,
|
||||
)
|
||||
from pip._internal.utils.temp_dir import TempDirectory
|
||||
from pip._internal.utils.virtualenv import (
|
||||
running_under_virtualenv,
|
||||
virtualenv_no_global,
|
||||
)
|
||||
from pip._internal.wheel_builder import build, should_build_for_install_command
|
||||
|
||||
logger = getLogger(__name__)
|
||||
|
||||
|
||||
class InstallCommand(RequirementCommand):
|
||||
"""
|
||||
Install packages from:
|
||||
|
||||
- PyPI (and other indexes) using requirement specifiers.
|
||||
- VCS project urls.
|
||||
- Local project directories.
|
||||
- Local or remote source archives.
|
||||
|
||||
pip also supports installing from "requirements files", which provide
|
||||
an easy way to specify a whole environment to be installed.
|
||||
"""
|
||||
|
||||
usage = """
|
||||
%prog [options] <requirement specifier> [package-index-options] ...
|
||||
%prog [options] -r <requirements file> [package-index-options] ...
|
||||
%prog [options] [-e] <vcs project url> ...
|
||||
%prog [options] [-e] <local project path> ...
|
||||
%prog [options] <archive url/path> ..."""
|
||||
|
||||
def add_options(self) -> None:
|
||||
self.cmd_opts.add_option(cmdoptions.requirements())
|
||||
self.cmd_opts.add_option(cmdoptions.constraints())
|
||||
self.cmd_opts.add_option(cmdoptions.no_deps())
|
||||
self.cmd_opts.add_option(cmdoptions.pre())
|
||||
|
||||
self.cmd_opts.add_option(cmdoptions.editable())
|
||||
self.cmd_opts.add_option(
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
dest="dry_run",
|
||||
default=False,
|
||||
help=(
|
||||
"Don't actually install anything, just print what would be. "
|
||||
"Can be used in combination with --ignore-installed "
|
||||
"to 'resolve' the requirements."
|
||||
),
|
||||
)
|
||||
self.cmd_opts.add_option(
|
||||
"-t",
|
||||
"--target",
|
||||
dest="target_dir",
|
||||
metavar="dir",
|
||||
default=None,
|
||||
help=(
|
||||
"Install packages into <dir>. "
|
||||
"By default this will not replace existing files/folders in "
|
||||
"<dir>. Use --upgrade to replace existing packages in <dir> "
|
||||
"with new versions."
|
||||
),
|
||||
)
|
||||
cmdoptions.add_target_python_options(self.cmd_opts)
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
"--user",
|
||||
dest="use_user_site",
|
||||
action="store_true",
|
||||
help=(
|
||||
"Install to the Python user install directory for your "
|
||||
"platform. Typically ~/.local/, or %APPDATA%\\Python on "
|
||||
"Windows. (See the Python documentation for site.USER_BASE "
|
||||
"for full details.)"
|
||||
),
|
||||
)
|
||||
self.cmd_opts.add_option(
|
||||
"--no-user",
|
||||
dest="use_user_site",
|
||||
action="store_false",
|
||||
help=SUPPRESS_HELP,
|
||||
)
|
||||
self.cmd_opts.add_option(
|
||||
"--root",
|
||||
dest="root_path",
|
||||
metavar="dir",
|
||||
default=None,
|
||||
help="Install everything relative to this alternate root directory.",
|
||||
)
|
||||
self.cmd_opts.add_option(
|
||||
"--prefix",
|
||||
dest="prefix_path",
|
||||
metavar="dir",
|
||||
default=None,
|
||||
help=(
|
||||
"Installation prefix where lib, bin and other top-level "
|
||||
"folders are placed. Note that the resulting installation may "
|
||||
"contain scripts and other resources which reference the "
|
||||
"Python interpreter of pip, and not that of ``--prefix``. "
|
||||
"See also the ``--python`` option if the intention is to "
|
||||
"install packages into another (possibly pip-free) "
|
||||
"environment."
|
||||
),
|
||||
)
|
||||
|
||||
self.cmd_opts.add_option(cmdoptions.src())
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
"-U",
|
||||
"--upgrade",
|
||||
dest="upgrade",
|
||||
action="store_true",
|
||||
help=(
|
||||
"Upgrade all specified packages to the newest available "
|
||||
"version. The handling of dependencies depends on the "
|
||||
"upgrade-strategy used."
|
||||
),
|
||||
)
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
"--upgrade-strategy",
|
||||
dest="upgrade_strategy",
|
||||
default="only-if-needed",
|
||||
choices=["only-if-needed", "eager"],
|
||||
help=(
|
||||
"Determines how dependency upgrading should be handled "
|
||||
"[default: %default]. "
|
||||
'"eager" - dependencies are upgraded regardless of '
|
||||
"whether the currently installed version satisfies the "
|
||||
"requirements of the upgraded package(s). "
|
||||
'"only-if-needed" - are upgraded only when they do not '
|
||||
"satisfy the requirements of the upgraded package(s)."
|
||||
),
|
||||
)
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
"--force-reinstall",
|
||||
dest="force_reinstall",
|
||||
action="store_true",
|
||||
help="Reinstall all packages even if they are already up-to-date.",
|
||||
)
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
"-I",
|
||||
"--ignore-installed",
|
||||
dest="ignore_installed",
|
||||
action="store_true",
|
||||
help=(
|
||||
"Ignore the installed packages, overwriting them. "
|
||||
"This can break your system if the existing package "
|
||||
"is of a different version or was installed "
|
||||
"with a different package manager!"
|
||||
),
|
||||
)
|
||||
|
||||
self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
|
||||
self.cmd_opts.add_option(cmdoptions.no_build_isolation())
|
||||
self.cmd_opts.add_option(cmdoptions.use_pep517())
|
||||
self.cmd_opts.add_option(cmdoptions.no_use_pep517())
|
||||
self.cmd_opts.add_option(cmdoptions.check_build_deps())
|
||||
self.cmd_opts.add_option(cmdoptions.override_externally_managed())
|
||||
|
||||
self.cmd_opts.add_option(cmdoptions.config_settings())
|
||||
self.cmd_opts.add_option(cmdoptions.global_options())
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
"--compile",
|
||||
action="store_true",
|
||||
dest="compile",
|
||||
default=True,
|
||||
help="Compile Python source files to bytecode",
|
||||
)
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
"--no-compile",
|
||||
action="store_false",
|
||||
dest="compile",
|
||||
help="Do not compile Python source files to bytecode",
|
||||
)
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
"--no-warn-script-location",
|
||||
action="store_false",
|
||||
dest="warn_script_location",
|
||||
default=True,
|
||||
help="Do not warn when installing scripts outside PATH",
|
||||
)
|
||||
self.cmd_opts.add_option(
|
||||
"--no-warn-conflicts",
|
||||
action="store_false",
|
||||
dest="warn_about_conflicts",
|
||||
default=True,
|
||||
help="Do not warn about broken dependencies",
|
||||
)
|
||||
self.cmd_opts.add_option(cmdoptions.no_binary())
|
||||
self.cmd_opts.add_option(cmdoptions.only_binary())
|
||||
self.cmd_opts.add_option(cmdoptions.prefer_binary())
|
||||
self.cmd_opts.add_option(cmdoptions.require_hashes())
|
||||
self.cmd_opts.add_option(cmdoptions.progress_bar())
|
||||
self.cmd_opts.add_option(cmdoptions.root_user_action())
|
||||
|
||||
index_opts = cmdoptions.make_option_group(
|
||||
cmdoptions.index_group,
|
||||
self.parser,
|
||||
)
|
||||
|
||||
self.parser.insert_option_group(0, index_opts)
|
||||
self.parser.insert_option_group(0, self.cmd_opts)
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
"--report",
|
||||
dest="json_report_file",
|
||||
metavar="file",
|
||||
default=None,
|
||||
help=(
|
||||
"Generate a JSON file describing what pip did to install "
|
||||
"the provided requirements. "
|
||||
"Can be used in combination with --dry-run and --ignore-installed "
|
||||
"to 'resolve' the requirements. "
|
||||
"When - is used as file name it writes to stdout. "
|
||||
"When writing to stdout, please combine with the --quiet option "
|
||||
"to avoid mixing pip logging output with JSON output."
|
||||
),
|
||||
)
|
||||
|
||||
    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:
        """Resolve, build, and install the requested requirements.

        Returns SUCCESS, or ERROR when an OSError aborts the install.
        Raises CommandError for invalid option combinations and
        InstallationError when wheel building fails.
        """
        # --user and --target imply different destinations; refuse both.
        if options.use_user_site and options.target_dir is not None:
            raise CommandError("Can not combine '--user' and '--target'")

        # Check whether the environment we're installing into is externally
        # managed, as specified in PEP 668. Specifying --root, --target, or
        # --prefix disables the check, since there's no reliable way to locate
        # the EXTERNALLY-MANAGED file for those cases. An exception is also
        # made specifically for "--dry-run --report" for convenience.
        installing_into_current_environment = (
            not (options.dry_run and options.json_report_file)
            and options.root_path is None
            and options.target_dir is None
            and options.prefix_path is None
        )
        if (
            installing_into_current_environment
            and not options.override_externally_managed
        ):
            check_externally_managed()

        # --upgrade selects the user's eager/only-if-needed strategy;
        # otherwise only unsatisfied requirements are acted on.
        upgrade_strategy = "to-satisfy-only"
        if options.upgrade:
            upgrade_strategy = options.upgrade_strategy

        cmdoptions.check_dist_restriction(options, check_target=True)

        logger.verbose("Using %s", get_pip_version())
        options.use_user_site = decide_user_install(
            options.use_user_site,
            prefix_path=options.prefix_path,
            target_dir=options.target_dir,
            root_path=options.root_path,
            isolated_mode=options.isolated_mode,
        )

        # With --target, install into a temp dir first, then move the
        # result into place in _handle_target_dir() at the end.
        target_temp_dir: Optional[TempDirectory] = None
        target_temp_dir_path: Optional[str] = None
        if options.target_dir:
            options.ignore_installed = True
            options.target_dir = os.path.abspath(options.target_dir)
            if (
                # fmt: off
                os.path.exists(options.target_dir) and
                not os.path.isdir(options.target_dir)
                # fmt: on
            ):
                raise CommandError(
                    "Target path exists but is not a directory, will not continue."
                )

            # Create a target directory for using with the target option
            target_temp_dir = TempDirectory(kind="target")
            target_temp_dir_path = target_temp_dir.path
            self.enter_context(target_temp_dir)

        global_options = options.global_options or []

        session = self.get_default_session(options)

        target_python = make_target_python(options)
        finder = self._build_package_finder(
            options=options,
            session=session,
            target_python=target_python,
            ignore_requires_python=options.ignore_requires_python,
        )
        build_tracker = self.enter_context(get_build_tracker())

        directory = TempDirectory(
            delete=not options.no_clean,
            kind="install",
            globally_managed=True,
        )

        try:
            reqs = self.get_requirements(args, options, finder, session)
            check_legacy_setup_py_options(options, reqs)

            wheel_cache = WheelCache(options.cache_dir)

            # Only when installing is it permitted to use PEP 660.
            # In other circumstances (pip wheel, pip download) we generate
            # regular (i.e. non editable) metadata and wheels.
            for req in reqs:
                req.permit_editable_wheels = True

            preparer = self.make_requirement_preparer(
                temp_build_dir=directory,
                options=options,
                build_tracker=build_tracker,
                session=session,
                finder=finder,
                use_user_site=options.use_user_site,
                verbosity=self.verbosity,
            )
            resolver = self.make_resolver(
                preparer=preparer,
                finder=finder,
                options=options,
                wheel_cache=wheel_cache,
                use_user_site=options.use_user_site,
                ignore_installed=options.ignore_installed,
                ignore_requires_python=options.ignore_requires_python,
                force_reinstall=options.force_reinstall,
                upgrade_strategy=upgrade_strategy,
                use_pep517=options.use_pep517,
                py_version_info=options.python_version,
            )

            self.trace_basic_info(finder)

            requirement_set = resolver.resolve(
                reqs, check_supported_wheels=not options.target_dir
            )

            # --report: dump the resolution result as JSON ("-" -> stdout).
            if options.json_report_file:
                report = InstallationReport(requirement_set.requirements_to_install)
                if options.json_report_file == "-":
                    print_json(data=report.to_dict())
                else:
                    with open(options.json_report_file, "w", encoding="utf-8") as f:
                        json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)

            # --dry-run: report what would be installed, then stop before
            # building or installing anything.
            if options.dry_run:
                would_install_items = sorted(
                    (r.metadata["name"], r.metadata["version"])
                    for r in requirement_set.requirements_to_install
                )
                if would_install_items:
                    write_output(
                        "Would install %s",
                        " ".join("-".join(item) for item in would_install_items),
                    )
                return SUCCESS

            try:
                pip_req = requirement_set.get_requirement("pip")
            except KeyError:
                modifying_pip = False
            else:
                # If we're not replacing an already installed pip,
                # we're not modifying it.
                modifying_pip = pip_req.satisfied_by is None
            protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)

            reqs_to_build = [
                r
                for r in requirement_set.requirements.values()
                if should_build_for_install_command(r)
            ]

            _, build_failures = build(
                reqs_to_build,
                wheel_cache=wheel_cache,
                verify=True,
                build_options=[],
                global_options=global_options,
            )

            if build_failures:
                raise InstallationError(
                    "Failed to build installable wheels for some "
                    "pyproject.toml based projects ({})".format(
                        ", ".join(r.name for r in build_failures)  # type: ignore
                    )
                )

            to_install = resolver.get_installation_order(requirement_set)

            # Check for conflicts in the package set we're installing.
            conflicts: Optional[ConflictDetails] = None
            should_warn_about_conflicts = (
                not options.ignore_dependencies and options.warn_about_conflicts
            )
            if should_warn_about_conflicts:
                conflicts = self._determine_conflicts(to_install)

            # Don't warn about script install locations if
            # --target or --prefix has been specified
            warn_script_location = options.warn_script_location
            if options.target_dir or options.prefix_path:
                warn_script_location = False

            installed = install_given_reqs(
                to_install,
                global_options,
                root=options.root_path,
                home=target_temp_dir_path,
                prefix=options.prefix_path,
                warn_script_location=warn_script_location,
                use_user_site=options.use_user_site,
                pycompile=options.compile,
            )

            lib_locations = get_lib_location_guesses(
                user=options.use_user_site,
                home=target_temp_dir_path,
                root=options.root_path,
                prefix=options.prefix_path,
                isolated=options.isolated_mode,
            )
            env = get_environment(lib_locations)

            # Display a summary of installed packages, with extra care to
            # display a package name as it was requested by the user.
            installed.sort(key=operator.attrgetter("name"))
            summary = []
            installed_versions = {}
            for distribution in env.iter_all_distributions():
                installed_versions[distribution.canonical_name] = distribution.version
            for package in installed:
                display_name = package.name
                version = installed_versions.get(canonicalize_name(display_name), None)
                if version:
                    text = f"{display_name}-{version}"
                else:
                    text = display_name
                summary.append(text)

            if conflicts is not None:
                self._warn_about_conflicts(
                    conflicts,
                    resolver_variant=self.determine_resolver_variant(options),
                )

            installed_desc = " ".join(summary)
            if installed_desc:
                write_output(
                    "Successfully installed %s",
                    installed_desc,
                )
        except OSError as error:
            # Any filesystem/permission failure during resolution or install
            # is reported as a formatted message rather than a traceback
            # (traceback shown only at -v and above).
            show_traceback = self.verbosity >= 1

            message = create_os_error_message(
                error,
                show_traceback,
                options.use_user_site,
            )
            logger.error(message, exc_info=show_traceback)

            return ERROR

        if options.target_dir:
            assert target_temp_dir
            self._handle_target_dir(
                options.target_dir, target_temp_dir, options.upgrade
            )
        if options.root_user_action == "warn":
            warn_if_run_as_root()
        return SUCCESS
||||
|
||||
    def _handle_target_dir(
        self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool
    ) -> None:
        """Move packages installed into the temporary target scheme into
        *target_dir*, replacing existing entries only when *upgrade* is set.
        """
        ensure_dir(target_dir)

        # Checking both purelib and platlib directories for installed
        # packages to be moved to target directory
        lib_dir_list = []

        scheme = get_scheme("", home=target_temp_dir.path)
        purelib_dir = scheme.purelib
        platlib_dir = scheme.platlib
        data_dir = scheme.data

        if os.path.exists(purelib_dir):
            lib_dir_list.append(purelib_dir)
        if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
            lib_dir_list.append(platlib_dir)
        if os.path.exists(data_dir):
            lib_dir_list.append(data_dir)

        for lib_dir in lib_dir_list:
            for item in os.listdir(lib_dir):
                if lib_dir == data_dir:
                    ddir = os.path.join(data_dir, item)
                    # Skip data entries that live under one of the lib
                    # directories (data_dir, when present, is the last
                    # element of lib_dir_list).
                    if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
                        continue
                target_item_dir = os.path.join(target_dir, item)
                if os.path.exists(target_item_dir):
                    if not upgrade:
                        logger.warning(
                            "Target directory %s already exists. Specify "
                            "--upgrade to force replacement.",
                            target_item_dir,
                        )
                        continue
                    if os.path.islink(target_item_dir):
                        # Never follow/replace symlinks automatically.
                        logger.warning(
                            "Target directory %s already exists and is "
                            "a link. pip will not automatically replace "
                            "links, please remove if replacement is "
                            "desired.",
                            target_item_dir,
                        )
                        continue
                    if os.path.isdir(target_item_dir):
                        shutil.rmtree(target_item_dir)
                    else:
                        os.remove(target_item_dir)

                shutil.move(os.path.join(lib_dir, item), target_item_dir)
||||
|
||||
def _determine_conflicts(
|
||||
self, to_install: List[InstallRequirement]
|
||||
) -> Optional[ConflictDetails]:
|
||||
try:
|
||||
return check_install_conflicts(to_install)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Error while checking for conflicts. Please file an issue on "
|
||||
"pip's issue tracker: https://github.com/pypa/pip/issues/new"
|
||||
)
|
||||
return None
|
||||
|
||||
def _warn_about_conflicts(
|
||||
self, conflict_details: ConflictDetails, resolver_variant: str
|
||||
) -> None:
|
||||
package_set, (missing, conflicting) = conflict_details
|
||||
if not missing and not conflicting:
|
||||
return
|
||||
|
||||
parts: List[str] = []
|
||||
if resolver_variant == "legacy":
|
||||
parts.append(
|
||||
"pip's legacy dependency resolver does not consider dependency "
|
||||
"conflicts when selecting packages. This behaviour is the "
|
||||
"source of the following dependency conflicts."
|
||||
)
|
||||
else:
|
||||
assert resolver_variant == "resolvelib"
|
||||
parts.append(
|
||||
"pip's dependency resolver does not currently take into account "
|
||||
"all the packages that are installed. This behaviour is the "
|
||||
"source of the following dependency conflicts."
|
||||
)
|
||||
|
||||
# NOTE: There is some duplication here, with commands/check.py
|
||||
for project_name in missing:
|
||||
version = package_set[project_name][0]
|
||||
for dependency in missing[project_name]:
|
||||
message = (
|
||||
f"{project_name} {version} requires {dependency[1]}, "
|
||||
"which is not installed."
|
||||
)
|
||||
parts.append(message)
|
||||
|
||||
for project_name in conflicting:
|
||||
version = package_set[project_name][0]
|
||||
for dep_name, dep_version, req in conflicting[project_name]:
|
||||
message = (
|
||||
"{name} {version} requires {requirement}, but {you} have "
|
||||
"{dep_name} {dep_version} which is incompatible."
|
||||
).format(
|
||||
name=project_name,
|
||||
version=version,
|
||||
requirement=req,
|
||||
dep_name=dep_name,
|
||||
dep_version=dep_version,
|
||||
you=("you" if resolver_variant == "resolvelib" else "you'll"),
|
||||
)
|
||||
parts.append(message)
|
||||
|
||||
logger.critical("\n".join(parts))
|
||||
|
||||
|
||||
def get_lib_location_guesses(
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> List[str]:
    """Return the purelib and platlib directories of the installation
    scheme selected by the given options.
    """
    scheme = get_scheme(
        "", user=user, home=home, root=root, isolated=isolated, prefix=prefix
    )
    return [scheme.purelib, scheme.platlib]
||||
|
||||
|
||||
def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
    """Return True if every candidate site-packages directory is writable."""
    candidate_dirs = set(get_lib_location_guesses(root=root, isolated=isolated))
    return all(test_writable_dir(path) for path in candidate_dirs)
||||
|
||||
|
||||
def decide_user_install(
    use_user_site: Optional[bool],
    prefix_path: Optional[str] = None,
    target_dir: Optional[str] = None,
    root_path: Optional[str] = None,
    isolated_mode: bool = False,
) -> bool:
    """Determine whether to do a user install based on the input options.

    If use_user_site is False, no additional checks are done.
    If use_user_site is True, it is checked for compatibility with other
    options.
    If use_user_site is None, the default behaviour depends on the environment,
    which is provided by the other arguments.
    """
    if use_user_site is not None:
        # In some cases (config from tox), use_user_site can be set to an
        # integer rather than a bool, so test truthiness rather than
        # comparing against False.
        if not use_user_site:
            logger.debug("Non-user install by explicit request")
            return False

        # Explicitly requested: validate against incompatible options
        # before honouring it.
        if prefix_path:
            raise CommandError(
                "Can not combine '--user' and '--prefix' as they imply "
                "different installation locations"
            )
        if virtualenv_no_global():
            raise InstallationError(
                "Can not perform a '--user' install. User site-packages "
                "are not visible in this virtualenv."
            )
        logger.debug("User install by explicit request")
        return True

    # Nothing requested explicitly; pick a default from the environment.

    # user install incompatible with --prefix/--target
    if prefix_path or target_dir:
        logger.debug("Non-user install due to --prefix or --target option")
        return False

    # If user installs are not enabled, choose a non-user install
    if not site.ENABLE_USER_SITE:
        logger.debug("Non-user install because user site-packages disabled")
        return False

    # If we have permission for a non-user install, do that,
    # otherwise do a user install.
    if site_packages_writable(root=root_path, isolated=isolated_mode):
        logger.debug("Non-user install because site-packages writeable")
        return False

    logger.info(
        "Defaulting to user installation because normal site-packages "
        "is not writeable"
    )
    return True
||||
|
||||
|
||||
def create_os_error_message(
    error: OSError, show_traceback: bool, using_user_site: bool
) -> str:
    """Format an error message for an OSError

    It may occur anytime during the execution of the install command.
    """
    # Mention the error itself only when no traceback will be shown.
    if show_traceback:
        lead = "Could not install packages due to an OSError.\n"
    else:
        lead = f"Could not install packages due to an OSError: {error}\n"
    parts = [lead]

    # Suggest useful actions to the user:
    # (1) using user site-packages or (2) verifying the permissions
    if error.errno == errno.EACCES:
        if not running_under_virtualenv() and not using_user_site:
            parts.append(
                "Consider using the `--user` option or check the permissions.\n"
            )
        else:
            parts.append("Check the permissions.\n")

    # Suggest the user to enable Long Paths if path length is
    # more than 260
    if (
        error.errno == errno.ENOENT
        and WINDOWS
        and error.filename
        and len(error.filename) > 260
    ):
        parts.append(
            "HINT: This error might have occurred since "
            "this system does not have Windows Long Path "
            "support enabled. You can find information on "
            "how to enable this at "
            "https://pip.pypa.io/warnings/enable-long-paths\n"
        )

    return "".join(parts).strip() + "\n"
|
375
Dependencies/Python/Lib/site-packages/pip/_internal/commands/list.py
vendored
Normal file
375
Dependencies/Python/Lib/site-packages/pip/_internal/commands/list.py
vendored
Normal file
@ -0,0 +1,375 @@
|
||||
import json
|
||||
import logging
|
||||
from optparse import Values
|
||||
from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast
|
||||
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
from pip._vendor.packaging.version import Version
|
||||
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.index_command import IndexGroupCommand
|
||||
from pip._internal.cli.status_codes import SUCCESS
|
||||
from pip._internal.exceptions import CommandError
|
||||
from pip._internal.metadata import BaseDistribution, get_environment
|
||||
from pip._internal.models.selection_prefs import SelectionPreferences
|
||||
from pip._internal.utils.compat import stdlib_pkgs
|
||||
from pip._internal.utils.misc import tabulate, write_output
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
from pip._internal.network.session import PipSession
|
||||
|
||||
class _DistWithLatestInfo(BaseDistribution):
    """Give the distribution object a couple of extra fields.

    These will be populated during ``get_outdated()``. This is dirty but
    makes the rest of the code much cleaner.
    """

    # Newest version found on the index for this distribution.
    latest_version: Version
    # How that newest version is distributed: "wheel" or "sdist".
    latest_filetype: str
||||
|
||||
_ProcessedDists = Sequence[_DistWithLatestInfo]
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ListCommand(IndexGroupCommand):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """

    # Listing is read-only, so it may run outside a required virtualenv.
    ignore_require_venv = True
    usage = """
      %prog [options]"""

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-o",
            "--outdated",
            action="store_true",
            default=False,
            help="List outdated packages",
        )
        self.cmd_opts.add_option(
            "-u",
            "--uptodate",
            action="store_true",
            default=False,
            help="List uptodate packages",
        )
        self.cmd_opts.add_option(
            "-e",
            "--editable",
            action="store_true",
            default=False,
            help="List editable projects.",
        )
        self.cmd_opts.add_option(
            "-l",
            "--local",
            action="store_true",
            default=False,
            help=(
                "If in a virtualenv that has global access, do not list "
                "globally-installed packages."
            ),
        )
        self.cmd_opts.add_option(
            "--user",
            dest="user",
            action="store_true",
            default=False,
            help="Only output packages installed in user-site.",
        )
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.cmd_opts.add_option(
            "--pre",
            action="store_true",
            default=False,
            help=(
                "Include pre-release and development versions. By default, "
                "pip only finds stable versions."
            ),
        )

        self.cmd_opts.add_option(
            "--format",
            action="store",
            dest="list_format",
            default="columns",
            choices=("columns", "freeze", "json"),
            help=(
                "Select the output format among: columns (default), freeze, or json. "
                "The 'freeze' format cannot be used with the --outdated option."
            ),
        )

        self.cmd_opts.add_option(
            "--not-required",
            action="store_true",
            dest="not_required",
            help="List packages that are not dependencies of installed packages.",
        )

        # --exclude-editable / --include-editable share one dest; the last
        # flag given on the command line wins (default: include editables).
        self.cmd_opts.add_option(
            "--exclude-editable",
            action="store_false",
            dest="include_editable",
            help="Exclude editable package from output.",
        )
        self.cmd_opts.add_option(
            "--include-editable",
            action="store_true",
            dest="include_editable",
            help="Include editable package from output.",
            default=True,
        )
        self.cmd_opts.add_option(cmdoptions.list_exclude())
        index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    def handle_pip_version_check(self, options: Values) -> None:
        # Only run the self-version check when this invocation will talk to
        # the index anyway (--outdated / --uptodate).
        if options.outdated or options.uptodate:
            super().handle_pip_version_check(options)

    def _build_package_finder(
        self, options: Values, session: "PipSession"
    ) -> "PackageFinder":
        """
        Create a package finder appropriate to this list command.
        """
        # Lazy import the heavy index modules as most list invocations won't need 'em.
        from pip._internal.index.collector import LinkCollector
        from pip._internal.index.package_finder import PackageFinder

        link_collector = LinkCollector.create(session, options=options)

        # Pass allow_yanked=False to ignore yanked versions.
        selection_prefs = SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=options.pre,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
        )

    def run(self, options: Values, args: List[str]) -> int:
        # Reject incompatible option combinations before doing any work.
        if options.outdated and options.uptodate:
            raise CommandError("Options --outdated and --uptodate cannot be combined.")

        if options.outdated and options.list_format == "freeze":
            raise CommandError(
                "List format 'freeze' cannot be used with the --outdated option."
            )

        cmdoptions.check_list_path_option(options)

        # Always skip stdlib packages; add any user-supplied --exclude names.
        skip = set(stdlib_pkgs)
        if options.excludes:
            skip.update(canonicalize_name(n) for n in options.excludes)

        packages: _ProcessedDists = [
            cast("_DistWithLatestInfo", d)
            for d in get_environment(options.path).iter_installed_distributions(
                local_only=options.local,
                user_only=options.user,
                editables_only=options.editable,
                include_editables=options.include_editable,
                skip=skip,
            )
        ]

        # get_not_required must be called firstly in order to find and
        # filter out all dependencies correctly. Otherwise a package
        # can't be identified as requirement because some parent packages
        # could be filtered out before.
        if options.not_required:
            packages = self.get_not_required(packages, options)

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        self.output_package_listing(packages, options)
        return SUCCESS

    def get_outdated(
        self, packages: "_ProcessedDists", options: Values
    ) -> "_ProcessedDists":
        # Keep only distributions whose index version is strictly newer.
        return [
            dist
            for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.version
        ]

    def get_uptodate(
        self, packages: "_ProcessedDists", options: Values
    ) -> "_ProcessedDists":
        # Keep only distributions already at the newest index version.
        return [
            dist
            for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.version
        ]

    def get_not_required(
        self, packages: "_ProcessedDists", options: Values
    ) -> "_ProcessedDists":
        # Every canonical name that appears as a dependency of any package.
        dep_keys = {
            canonicalize_name(dep.name)
            for dist in packages
            for dep in (dist.iter_dependencies() or ())
        }

        # Create a set to remove duplicate packages, and cast it to a list
        # to keep the return type consistent with get_outdated and
        # get_uptodate
        return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys})

    def iter_packages_latest_infos(
        self, packages: "_ProcessedDists", options: Values
    ) -> Generator["_DistWithLatestInfo", None, None]:
        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)

            def latest_info(
                dist: "_DistWithLatestInfo",
            ) -> Optional["_DistWithLatestInfo"]:
                # Query the index for every candidate of this project.
                all_candidates = finder.find_all_candidates(dist.canonical_name)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [
                        candidate
                        for candidate in all_candidates
                        if not candidate.version.is_prerelease
                    ]

                evaluator = finder.make_candidate_evaluator(
                    project_name=dist.canonical_name,
                )
                best_candidate = evaluator.sort_best_candidate(all_candidates)
                if best_candidate is None:
                    # No candidate on the index: drop this distribution.
                    return None

                remote_version = best_candidate.version
                if best_candidate.link.is_wheel:
                    typ = "wheel"
                else:
                    typ = "sdist"
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                return dist

            for dist in map(latest_info, packages):
                if dist is not None:
                    yield dist

    def output_package_listing(
        self, packages: "_ProcessedDists", options: Values
    ) -> None:
        # Case-insensitive ordering: canonical names are normalized.
        packages = sorted(
            packages,
            key=lambda dist: dist.canonical_name,
        )
        if options.list_format == "columns" and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == "freeze":
            for dist in packages:
                if options.verbose >= 1:
                    write_output(
                        "%s==%s (%s)", dist.raw_name, dist.version, dist.location
                    )
                else:
                    write_output("%s==%s", dist.raw_name, dist.version)
        elif options.list_format == "json":
            write_output(format_for_json(packages, options))

    def output_package_listing_columns(
        self, data: List[List[str]], header: List[str]
    ) -> None:
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join("-" * x for x in sizes))

        for val in pkg_strings:
            write_output(val)
||||
|
||||
|
||||
def format_for_columns(
    pkgs: "_ProcessedDists", options: Values
) -> Tuple[List[List[str]], List[str]]:
    """Build (rows, header) for output_package_listing_columns.

    Columns beyond Package/Version appear only when relevant: Latest/Type
    for --outdated runs, the editable-project column when any listed
    distribution is editable, and Location/Installer at -v and above.
    """
    show_outdated = options.outdated
    show_editable_location = any(pkg.editable for pkg in pkgs)
    verbose = options.verbose >= 1

    header = ["Package", "Version"]
    if show_outdated:
        header += ["Latest", "Type"]
    if show_editable_location:
        header.append("Editable project location")
    if verbose:
        header += ["Location", "Installer"]

    rows = []
    for pkg in pkgs:
        row = [pkg.raw_name, pkg.raw_version]
        if show_outdated:
            # Separate out the latest version and distribution type.
            row += [str(pkg.latest_version), pkg.latest_filetype]
        if show_editable_location:
            row.append(pkg.editable_project_location or "")
        if verbose:
            row += [pkg.location or "", pkg.installer]
        rows.append(row)

    return rows, header
||||
|
||||
|
||||
def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
    """Serialize *packages* as a JSON array of per-distribution objects.

    Each object always carries name/version; location/installer are added
    at -v, latest_version/latest_filetype for --outdated, and
    editable_project_location only when the project is editable.
    """

    def entry(dist):
        info = {
            "name": dist.raw_name,
            "version": str(dist.version),
        }
        if options.verbose >= 1:
            info["location"] = dist.location or ""
            info["installer"] = dist.installer
        if options.outdated:
            info["latest_version"] = str(dist.latest_version)
            info["latest_filetype"] = dist.latest_filetype
        project_location = dist.editable_project_location
        if project_location:
            info["editable_project_location"] = project_location
        return info

    return json.dumps([entry(dist) for dist in packages])
|
172
Dependencies/Python/Lib/site-packages/pip/_internal/commands/search.py
vendored
Normal file
172
Dependencies/Python/Lib/site-packages/pip/_internal/commands/search.py
vendored
Normal file
@ -0,0 +1,172 @@
|
||||
import logging
|
||||
import shutil
|
||||
import sys
|
||||
import textwrap
|
||||
import xmlrpc.client
|
||||
from collections import OrderedDict
|
||||
from optparse import Values
|
||||
from typing import TYPE_CHECKING, Dict, List, Optional, TypedDict
|
||||
|
||||
from pip._vendor.packaging.version import parse as parse_version
|
||||
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.req_command import SessionCommandMixin
|
||||
from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
|
||||
from pip._internal.exceptions import CommandError
|
||||
from pip._internal.metadata import get_default_environment
|
||||
from pip._internal.models.index import PyPI
|
||||
from pip._internal.network.xmlrpc import PipXmlrpcTransport
|
||||
from pip._internal.utils.logging import indent_log
|
||||
from pip._internal.utils.misc import write_output
|
||||
|
||||
if TYPE_CHECKING:
|
||||
|
||||
class TransformedHit(TypedDict):
|
||||
name: str
|
||||
summary: str
|
||||
versions: List[str]
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SearchCommand(Command, SessionCommandMixin):
    """Search for PyPI packages whose name or summary contains <query>."""

    usage = """
      %prog [options] <query>"""
    # Searching never touches the local environment, so a required
    # virtualenv need not be active.
    ignore_require_venv = True

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-i",
            "--index",
            dest="index",
            metavar="URL",
            default=PyPI.pypi_url,
            help="Base URL of Python Package Index (default %default)",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        if not args:
            raise CommandError("Missing required argument (search query).")
        query = args
        pypi_hits = self.search(query, options)
        hits = transform_hits(pypi_hits)

        # Wrap result lines to the terminal width only when attached to a tty.
        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = shutil.get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:
        """Query the index's XML-RPC search endpoint; return the raw hit dicts.

        Raises CommandError when the server reports an XML-RPC fault.
        """
        index_url = options.index

        session = self.get_default_session(options)

        # Route the XML-RPC call through pip's own HTTP session rather
        # than xmlrpc.client's default transport.
        transport = PipXmlrpcTransport(index_url, session)
        pypi = xmlrpc.client.ServerProxy(index_url, transport)
        try:
            hits = pypi.search({"name": query, "summary": query}, "or")
        except xmlrpc.client.Fault as fault:
            message = (
                f"XMLRPC request failed [code: {fault.faultCode}]\n{fault.faultString}"
            )
            raise CommandError(message)
        assert isinstance(hits, list)
        return hits
||||
|
||||
|
||||
def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.

    :param hits: raw hit dicts with "name", "summary" and "version" keys.
    :return: one entry per package, versions accumulated in input order.
    """
    # OrderedDict preserves first-seen package order for display.
    packages: Dict[str, TransformedHit] = OrderedDict()
    for hit in hits:
        name = hit["name"]
        summary = hit["summary"]
        version = hit["version"]

        # Idiomatic membership test on the dict itself (no .keys() view).
        if name not in packages:
            packages[name] = {
                "name": name,
                "summary": summary,
                "versions": [version],
            }
        else:
            packages[name]["versions"].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]["versions"]):
                packages[name]["summary"] = summary

    return list(packages.values())
|
||||
|
||||
|
||||
def print_dist_installation_info(name: str, latest: str) -> None:
    """If *name* is installed locally, print its version relative to *latest*."""
    env = get_default_environment()
    dist = env.get_distribution(name)
    if dist is not None:
        with indent_log():
            if dist.version == latest:
                write_output("INSTALLED: %s (latest)", dist.version)
            else:
                write_output("INSTALLED: %s", dist.version)
                # A pre-release "latest" needs --pre to be installable,
                # so tell the user explicitly.
                if parse_version(latest).pre:
                    write_output(
                        "LATEST: %s (pre-release; install"
                        " with `pip install --pre`)",
                        latest,
                    )
                else:
                    write_output("LATEST: %s", latest)
|
||||
|
||||
|
||||
def print_results(
    hits: List["TransformedHit"],
    name_column_width: Optional[int] = None,
    terminal_width: Optional[int] = None,
) -> None:
    """Render hits as aligned '<name> (<latest>) - <summary>' rows.

    :param name_column_width: fixed width for the name column; computed
        from the widest hit when None.
    :param terminal_width: wrap summaries to this width when given.
    """
    if not hits:
        return
    if name_column_width is None:
        # Width of the widest "name (version)" pair, plus 4 for the
        # surrounding parentheses/space padding.
        name_column_width = (
            max(
                [
                    len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
                    for hit in hits
                ]
            )
            + 4
        )

    for hit in hits:
        name = hit["name"]
        summary = hit["summary"] or ""
        latest = highest_version(hit.get("versions", ["-"]))
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary_lines = textwrap.wrap(summary, target_width)
                summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines)

        name_latest = f"{name} ({latest})"
        line = f"{name_latest:{name_column_width}} - {summary}"
        try:
            write_output(line)
            print_dist_installation_info(name, latest)
        except UnicodeEncodeError:
            # Terminal cannot encode this row's characters; skip it silently.
            pass
|
||||
|
||||
|
||||
def highest_version(versions: List[str]) -> str:
    """Return the greatest version string in *versions* per PEP 440 ordering."""
    best = versions[0]
    for candidate in versions[1:]:
        # Strict '>' keeps the first of equal maxima, matching max() semantics.
        if parse_version(candidate) > parse_version(best):
            best = candidate
    return best
|
224
Dependencies/Python/Lib/site-packages/pip/_internal/commands/show.py
vendored
Normal file
224
Dependencies/Python/Lib/site-packages/pip/_internal/commands/show.py
vendored
Normal file
@ -0,0 +1,224 @@
|
||||
import logging
|
||||
from optparse import Values
|
||||
from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional
|
||||
|
||||
from pip._vendor.packaging.requirements import InvalidRequirement
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
||||
from pip._internal.metadata import BaseDistribution, get_default_environment
|
||||
from pip._internal.utils.misc import write_output
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ShowCommand(Command):
    """
    Show information about one or more installed packages.

    The output is in RFC-compliant mail header format.
    """

    usage = """
      %prog [options] <package> ..."""
    # Inspecting installed metadata does not require a virtualenv.
    ignore_require_venv = True

    def add_options(self) -> None:
        # Single flag: -f/--files lists every installed file per package.
        self.cmd_opts.add_option(
            "-f",
            "--files",
            dest="files",
            action="store_true",
            default=False,
            help="Show the full list of installed files for each package.",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Print metadata for each named package; ERROR when none printed."""
        if not args:
            logger.warning("ERROR: Please provide a package name or names.")
            return ERROR
        query = args

        results = search_packages_info(query)
        # print_results() returns False when the generator yielded nothing.
        if not print_results(
            results, list_files=options.files, verbose=options.verbose
        ):
            return ERROR
        return SUCCESS
|
||||
|
||||
|
||||
class _PackageInfo(NamedTuple):
    # Flattened, display-ready snapshot of one installed distribution,
    # produced by search_packages_info() and consumed by print_results().
    name: str
    version: str
    location: str
    # Path of the editable checkout, or None for a regular install.
    editable_project_location: Optional[str]
    # Direct dependencies, sorted case-insensitively.
    requires: List[str]
    # Installed packages that declare a dependency on this one.
    required_by: List[str]
    installer: str
    metadata_version: str
    classifiers: List[str]
    summary: str
    homepage: str
    project_urls: List[str]
    author: str
    author_email: str
    license: str
    license_expression: str
    entry_points: List[str]
    # None when neither RECORD nor installed-files.txt is available.
    files: Optional[List[str]]
|
||||
|
||||
|
||||
def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.
    """
    env = get_default_environment()

    # Index every installed distribution by canonical name for O(1) lookup.
    installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}
    query_names = [canonicalize_name(name) for name in query]
    missing = sorted(
        [name for name, pkg in zip(query, query_names) if pkg not in installed]
    )
    if missing:
        logger.warning("Package(s) not found: %s", ", ".join(missing))

    def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
        # Reverse-dependency scan: every installed dist that declares
        # current_dist among its dependencies.
        return (
            dist.metadata["Name"] or "UNKNOWN"
            for dist in installed.values()
            if current_dist.canonical_name
            in {canonicalize_name(d.name) for d in dist.iter_dependencies()}
        )

    for query_name in query_names:
        try:
            dist = installed[query_name]
        except KeyError:
            # Already reported in `missing` above; just skip it.
            continue

        try:
            requires = sorted(
                # Avoid duplicates in requirements (e.g. due to environment markers).
                {req.name for req in dist.iter_dependencies()},
                key=str.lower,
            )
        except InvalidRequirement:
            # Fall back to the raw requirement strings when one cannot parse.
            requires = sorted(dist.iter_raw_dependencies(), key=str.lower)

        try:
            required_by = sorted(_get_requiring_packages(dist), key=str.lower)
        except InvalidRequirement:
            required_by = ["#N/A"]

        try:
            entry_points_text = dist.read_text("entry_points.txt")
            entry_points = entry_points_text.splitlines(keepends=False)
        except FileNotFoundError:
            entry_points = []

        files_iter = dist.iter_declared_entries()
        if files_iter is None:
            # No RECORD/installed-files.txt; print_results reports this.
            files: Optional[List[str]] = None
        else:
            files = sorted(files_iter)

        metadata = dist.metadata

        project_urls = metadata.get_all("Project-URL", [])
        homepage = metadata.get("Home-page", "")
        if not homepage:
            # It's common that there is a "homepage" Project-URL, but Home-page
            # remains unset (especially as PEP 621 doesn't surface the field).
            #
            # This logic was taken from PyPI's codebase.
            for url in project_urls:
                url_label, url = url.split(",", maxsplit=1)
                normalized_label = (
                    url_label.casefold().replace("-", "").replace("_", "").strip()
                )
                if normalized_label == "homepage":
                    homepage = url.strip()
                    break

        yield _PackageInfo(
            name=dist.raw_name,
            version=dist.raw_version,
            location=dist.location or "",
            editable_project_location=dist.editable_project_location,
            requires=requires,
            required_by=required_by,
            installer=dist.installer,
            metadata_version=dist.metadata_version or "",
            classifiers=metadata.get_all("Classifier", []),
            summary=metadata.get("Summary", ""),
            homepage=homepage,
            project_urls=project_urls,
            author=metadata.get("Author", ""),
            author_email=metadata.get("Author-email", ""),
            license=metadata.get("License", ""),
            license_expression=metadata.get("License-Expression", ""),
            entry_points=entry_points,
            files=files,
        )
|
||||
|
||||
|
||||
def print_results(
    distributions: Iterable[_PackageInfo],
    list_files: bool,
    verbose: bool,
) -> bool:
    """
    Print the information from installed distributions found.

    :param list_files: also print each installed file.
    :param verbose: also print installer, classifiers, entry points, URLs.
    :return: True if at least one distribution was printed.
    """
    results_printed = False
    for i, dist in enumerate(distributions):
        results_printed = True
        if i > 0:
            # Separator between consecutive package records.
            write_output("---")

        # NOTE(review): assumes metadata_version is a non-empty dotted
        # number; an empty string would make int('') raise — confirm that
        # search_packages_info always supplies a parseable value here.
        metadata_version_tuple = tuple(map(int, dist.metadata_version.split(".")))

        write_output("Name: %s", dist.name)
        write_output("Version: %s", dist.version)
        write_output("Summary: %s", dist.summary)
        write_output("Home-page: %s", dist.homepage)
        write_output("Author: %s", dist.author)
        write_output("Author-email: %s", dist.author_email)
        # Metadata 2.4 introduced License-Expression; prefer it when present.
        if metadata_version_tuple >= (2, 4) and dist.license_expression:
            write_output("License-Expression: %s", dist.license_expression)
        else:
            write_output("License: %s", dist.license)
        write_output("Location: %s", dist.location)
        if dist.editable_project_location is not None:
            write_output(
                "Editable project location: %s", dist.editable_project_location
            )
        write_output("Requires: %s", ", ".join(dist.requires))
        write_output("Required-by: %s", ", ".join(dist.required_by))

        if verbose:
            write_output("Metadata-Version: %s", dist.metadata_version)
            write_output("Installer: %s", dist.installer)
            write_output("Classifiers:")
            for classifier in dist.classifiers:
                write_output("  %s", classifier)
            write_output("Entry-points:")
            for entry in dist.entry_points:
                write_output("  %s", entry.strip())
            write_output("Project-URLs:")
            for project_url in dist.project_urls:
                write_output("  %s", project_url)
        if list_files:
            write_output("Files:")
            if dist.files is None:
                write_output("Cannot locate RECORD or installed-files.txt")
            else:
                for line in dist.files:
                    write_output("  %s", line.strip())
    return results_printed
|
114
Dependencies/Python/Lib/site-packages/pip/_internal/commands/uninstall.py
vendored
Normal file
114
Dependencies/Python/Lib/site-packages/pip/_internal/commands/uninstall.py
vendored
Normal file
@ -0,0 +1,114 @@
|
||||
import logging
|
||||
from optparse import Values
|
||||
from typing import List
|
||||
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.index_command import SessionCommandMixin
|
||||
from pip._internal.cli.status_codes import SUCCESS
|
||||
from pip._internal.exceptions import InstallationError
|
||||
from pip._internal.req import parse_requirements
|
||||
from pip._internal.req.constructors import (
|
||||
install_req_from_line,
|
||||
install_req_from_parsed_requirement,
|
||||
)
|
||||
from pip._internal.utils.misc import (
|
||||
check_externally_managed,
|
||||
protect_pip_from_modification_on_windows,
|
||||
warn_if_run_as_root,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class UninstallCommand(Command, SessionCommandMixin):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """

    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""

    def add_options(self) -> None:
        # -r/--requirement may repeat; each file contributes named
        # requirements to uninstall.
        self.cmd_opts.add_option(
            "-r",
            "--requirement",
            dest="requirements",
            action="append",
            default=[],
            metavar="file",
            help=(
                "Uninstall all the packages listed in the given requirements "
                "file.  This option can be used multiple times."
            ),
        )
        self.cmd_opts.add_option(
            "-y",
            "--yes",
            dest="yes",
            action="store_true",
            help="Don't ask for confirmation of uninstall deletions.",
        )
        self.cmd_opts.add_option(cmdoptions.root_user_action())
        self.cmd_opts.add_option(cmdoptions.override_externally_managed())
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Collect named requirements from args/files, then uninstall each."""
        session = self.get_default_session(options)

        # Keyed by canonical name so duplicates collapse to one uninstall.
        reqs_to_uninstall = {}
        for name in args:
            req = install_req_from_line(
                name,
                isolated=options.isolated_mode,
            )
            if req.name:
                reqs_to_uninstall[canonicalize_name(req.name)] = req
            else:
                # e.g. a bare URL or path: uninstall needs a project name.
                logger.warning(
                    "Invalid requirement: %r ignored -"
                    " the uninstall command expects named"
                    " requirements.",
                    name,
                )
        for filename in options.requirements:
            for parsed_req in parse_requirements(
                filename, options=options, session=session
            ):
                req = install_req_from_parsed_requirement(
                    parsed_req, isolated=options.isolated_mode
                )
                if req.name:
                    reqs_to_uninstall[canonicalize_name(req.name)] = req
        if not reqs_to_uninstall:
            raise InstallationError(
                f"You must give at least one requirement to {self.name} (see "
                f'"pip help {self.name}")'
            )

        # PEP 668: refuse to touch externally managed environments unless
        # explicitly overridden.
        if not options.override_externally_managed:
            check_externally_managed()

        protect_pip_from_modification_on_windows(
            modifying_pip="pip" in reqs_to_uninstall
        )

        for req in reqs_to_uninstall.values():
            uninstall_pathset = req.uninstall(
                auto_confirm=options.yes,
                verbose=self.verbosity > 0,
            )
            if uninstall_pathset:
                # Finalize removal only after the whole set was staged.
                uninstall_pathset.commit()
        if options.root_user_action == "warn":
            warn_if_run_as_root()
        return SUCCESS
|
182
Dependencies/Python/Lib/site-packages/pip/_internal/commands/wheel.py
vendored
Normal file
182
Dependencies/Python/Lib/site-packages/pip/_internal/commands/wheel.py
vendored
Normal file
@ -0,0 +1,182 @@
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
from optparse import Values
|
||||
from typing import List
|
||||
|
||||
from pip._internal.cache import WheelCache
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
|
||||
from pip._internal.cli.status_codes import SUCCESS
|
||||
from pip._internal.exceptions import CommandError
|
||||
from pip._internal.operations.build.build_tracker import get_build_tracker
|
||||
from pip._internal.req.req_install import (
|
||||
InstallRequirement,
|
||||
check_legacy_setup_py_options,
|
||||
)
|
||||
from pip._internal.utils.misc import ensure_dir, normalize_path
|
||||
from pip._internal.utils.temp_dir import TempDirectory
|
||||
from pip._internal.wheel_builder import build, should_build_for_wheel_command
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    'pip wheel' uses the build system interface as described here:
    https://pip.pypa.io/en/stable/reference/build-system/

    """

    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def add_options(self) -> None:
        # Destination directory for the built wheels.
        self.cmd_opts.add_option(
            "-w",
            "--wheel-dir",
            dest="wheel_dir",
            metavar="dir",
            default=os.curdir,
            help=(
                "Build wheels into <dir>, where the default is the "
                "current working directory."
            ),
        )
        # Standard resolver/build options shared with `pip install`.
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
        self.cmd_opts.add_option(cmdoptions.prefer_binary())
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.check_build_deps())
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.editable())
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.src())
        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.progress_bar())

        self.cmd_opts.add_option(
            "--no-verify",
            dest="no_verify",
            action="store_true",
            default=False,
            help="Don't verify if built wheel is valid.",
        )

        self.cmd_opts.add_option(cmdoptions.config_settings())
        self.cmd_opts.add_option(cmdoptions.build_options())
        self.cmd_opts.add_option(cmdoptions.global_options())

        self.cmd_opts.add_option(
            "--pre",
            action="store_true",
            default=False,
            help=(
                "Include pre-release and development versions. By default, "
                "pip only finds stable versions."
            ),
        )

        self.cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:
        """Resolve requirements, build wheels, copy them to the wheel dir.

        Raises CommandError if any wheel fails to build or copy.
        """
        session = self.get_default_session(options)

        finder = self._build_package_finder(options, session)

        options.wheel_dir = normalize_path(options.wheel_dir)
        ensure_dir(options.wheel_dir)

        # Tracks in-progress builds to detect cyclic build requirements.
        build_tracker = self.enter_context(get_build_tracker())

        directory = TempDirectory(
            delete=not options.no_clean,
            kind="wheel",
            globally_managed=True,
        )

        reqs = self.get_requirements(args, options, finder, session)
        check_legacy_setup_py_options(options, reqs)

        wheel_cache = WheelCache(options.cache_dir)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            build_tracker=build_tracker,
            session=session,
            finder=finder,
            download_dir=options.wheel_dir,
            use_user_site=False,
            verbosity=self.verbosity,
        )

        resolver = self.make_resolver(
            preparer=preparer,
            finder=finder,
            options=options,
            wheel_cache=wheel_cache,
            ignore_requires_python=options.ignore_requires_python,
            use_pep517=options.use_pep517,
        )

        self.trace_basic_info(finder)

        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

        # Requirements that resolved to a wheel are just saved; source
        # requirements that should_build are queued for building.
        reqs_to_build: List[InstallRequirement] = []
        for req in requirement_set.requirements.values():
            if req.is_wheel:
                preparer.save_linked_requirement(req)
            elif should_build_for_wheel_command(req):
                reqs_to_build.append(req)

        preparer.prepare_linked_requirements_more(requirement_set.requirements.values())

        # build wheels
        build_successes, build_failures = build(
            reqs_to_build,
            wheel_cache=wheel_cache,
            verify=(not options.no_verify),
            build_options=options.build_options or [],
            global_options=options.global_options or [],
        )
        for req in build_successes:
            assert req.link and req.link.is_wheel
            assert req.local_file_path
            # copy from cache to target directory
            try:
                shutil.copy(req.local_file_path, options.wheel_dir)
            except OSError as e:
                # Copy failure counts as a build failure for exit status.
                logger.warning(
                    "Building wheel for %s failed: %s",
                    req.name,
                    e,
                )
                build_failures.append(req)
        if len(build_failures) != 0:
            raise CommandError("Failed to build one or more wheels")

        return SUCCESS
|
383
Dependencies/Python/Lib/site-packages/pip/_internal/configuration.py
vendored
Normal file
383
Dependencies/Python/Lib/site-packages/pip/_internal/configuration.py
vendored
Normal file
@ -0,0 +1,383 @@
|
||||
"""Configuration management setup
|
||||
|
||||
Some terminology:
|
||||
- name
|
||||
As written in config files.
|
||||
- value
|
||||
Value associated with a name
|
||||
- key
|
||||
Name combined with it's section (section.name)
|
||||
- variant
|
||||
A single word describing where the configuration key-value pair came from
|
||||
"""
|
||||
|
||||
import configparser
|
||||
import locale
|
||||
import os
|
||||
import sys
|
||||
from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
|
||||
|
||||
from pip._internal.exceptions import (
|
||||
ConfigurationError,
|
||||
ConfigurationFileCouldNotBeLoaded,
|
||||
)
|
||||
from pip._internal.utils import appdirs
|
||||
from pip._internal.utils.compat import WINDOWS
|
||||
from pip._internal.utils.logging import getLogger
|
||||
from pip._internal.utils.misc import ensure_dir, enum
|
||||
|
||||
RawConfigParser = configparser.RawConfigParser # Shorthand
|
||||
Kind = NewType("Kind", str)
|
||||
|
||||
CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
|
||||
ENV_NAMES_IGNORED = "version", "help"
|
||||
|
||||
# The kinds of configurations there are.
|
||||
kinds = enum(
|
||||
USER="user", # User Specific
|
||||
GLOBAL="global", # System Wide
|
||||
SITE="site", # [Virtual] Environment Specific
|
||||
ENV="env", # from PIP_CONFIG_FILE
|
||||
ENV_VAR="env-var", # from Environment Variables
|
||||
)
|
||||
OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
|
||||
VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE
|
||||
|
||||
logger = getLogger(__name__)
|
||||
|
||||
|
||||
# NOTE: Maybe use the optionx attribute to normalize keynames.
|
||||
def _normalize_name(name: str) -> str:
|
||||
"""Make a name consistent regardless of source (environment or file)"""
|
||||
name = name.lower().replace("_", "-")
|
||||
if name.startswith("--"):
|
||||
name = name[2:] # only prefer long opts
|
||||
return name
|
||||
|
||||
|
||||
def _disassemble_key(name: str) -> List[str]:
|
||||
if "." not in name:
|
||||
error_message = (
|
||||
"Key does not contain dot separated section and key. "
|
||||
f"Perhaps you wanted to use 'global.{name}' instead?"
|
||||
)
|
||||
raise ConfigurationError(error_message)
|
||||
return name.split(".", 1)
|
||||
|
||||
|
||||
def get_configuration_files() -> Dict[Kind, List[str]]:
    """Return candidate config file paths keyed by variant (GLOBAL/SITE/USER)."""
    global_config_files = [
        os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip")
    ]

    site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
    # Old per-user location (~/pip or ~/.pip) kept for backward compatibility.
    legacy_config_file = os.path.join(
        os.path.expanduser("~"),
        "pip" if WINDOWS else ".pip",
        CONFIG_BASENAME,
    )
    new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
    return {
        kinds.GLOBAL: global_config_files,
        kinds.SITE: [site_config_file],
        # Legacy path listed first so the newer location wins on load.
        kinds.USER: [legacy_config_file, new_config_file],
    }
|
||||
|
||||
|
||||
class Configuration:
|
||||
"""Handles management of configuration.
|
||||
|
||||
Provides an interface to accessing and managing configuration files.
|
||||
|
||||
This class converts provides an API that takes "section.key-name" style
|
||||
keys and stores the value associated with it as "key-name" under the
|
||||
section "section".
|
||||
|
||||
This allows for a clean interface wherein the both the section and the
|
||||
key-name are preserved in an easy to manage form in the configuration files
|
||||
and the data stored is also nice.
|
||||
"""
|
||||
|
||||
    def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
        """Set up empty per-variant parser/value state.

        :param isolated: when True, environment variables are never loaded.
        :param load_only: restrict loading and editing to a single file
            variant; must be one of USER/GLOBAL/SITE or None for all.
        """
        super().__init__()

        if load_only is not None and load_only not in VALID_LOAD_ONLY:
            raise ConfigurationError(
                "Got invalid value for load_only - should be one of {}".format(
                    ", ".join(map(repr, VALID_LOAD_ONLY))
                )
            )
        self.isolated = isolated
        self.load_only = load_only

        # Because we keep track of where we got the data from
        self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
            variant: [] for variant in OVERRIDE_ORDER
        }
        self._config: Dict[Kind, Dict[str, Any]] = {
            variant: {} for variant in OVERRIDE_ORDER
        }
        # Parsers with unsaved modifications, flushed by save().
        self._modified_parsers: List[Tuple[str, RawConfigParser]] = []
|
||||
|
||||
    def load(self) -> None:
        """Loads configuration from configuration files and environment"""
        self._load_config_files()
        # --isolated suppresses the environment-variable layer entirely.
        if not self.isolated:
            self._load_environment_vars()
|
||||
|
||||
    def get_file_to_edit(self) -> Optional[str]:
        """Returns the file with highest priority in configuration"""
        assert self.load_only is not None, "Need to be specified a file to be editing"

        try:
            return self._get_parser_to_modify()[0]
        except IndexError:
            # No parser exists for the selected variant yet.
            return None
|
||||
|
||||
    def items(self) -> Iterable[Tuple[str, Any]]:
        """Returns key-value pairs like dict.items() representing the loaded
        configuration
        """
        # _dictionary already applies OVERRIDE_ORDER precedence.
        return self._dictionary.items()
|
||||
|
||||
    def get_value(self, key: str) -> Any:
        """Get a value from the configuration.

        Raises ConfigurationError for malformed or unknown keys.
        """
        orig_key = key
        key = _normalize_name(key)
        try:
            return self._dictionary[key]
        except KeyError:
            # disassembling triggers a more useful error message than simply
            # "No such key" in the case that the key isn't in the form command.option
            _disassemble_key(key)
            raise ConfigurationError(f"No such key - {orig_key}")
|
||||
|
||||
    def set_value(self, key: str, value: Any) -> None:
        """Modify a value in the configuration.

        Updates both the in-memory state and the file parser; save() must
        be called afterwards to persist the change.
        """
        key = _normalize_name(key)
        self._ensure_have_load_only()

        assert self.load_only
        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Modify the parser and the configuration
            if not parser.has_section(section):
                parser.add_section(section)
            parser.set(section, name, value)

        self._config[self.load_only][key] = value
        self._mark_as_modified(fname, parser)
|
||||
|
||||
    def unset_value(self, key: str) -> None:
        """Unset a value in the configuration.

        Removes the key from both the in-memory state and the file parser;
        raises ConfigurationError if the key is not present.
        """
        orig_key = key
        key = _normalize_name(key)
        self._ensure_have_load_only()

        assert self.load_only
        if key not in self._config[self.load_only]:
            raise ConfigurationError(f"No such key - {orig_key}")

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)
            if not (
                parser.has_section(section) and parser.remove_option(section, name)
            ):
                # In-memory state said the key exists, so the parser must
                # agree; a mismatch indicates internal corruption.
                # The option was not removed.
                raise ConfigurationError(
                    "Fatal Internal error [id=1]. Please report as a bug."
                )

            # The section may be empty after the option was removed.
            if not parser.items(section):
                parser.remove_section(section)
            self._mark_as_modified(fname, parser)

        del self._config[self.load_only][key]
|
||||
|
||||
    def save(self) -> None:
        """Save the current in-memory state.

        Writes every parser touched by set_value()/unset_value() back to
        its file, creating parent directories as needed.
        """
        self._ensure_have_load_only()

        for fname, parser in self._modified_parsers:
            logger.info("Writing to %s", fname)

            # Ensure directory exists.
            ensure_dir(os.path.dirname(fname))

            # Ensure directory's permission(need to be writeable)
            try:
                with open(fname, "w") as f:
                    parser.write(f)
            except OSError as error:
                raise ConfigurationError(
                    f"An error occurred while writing to the configuration file "
                    f"{fname}: {error}"
                )
|
||||
|
||||
#
|
||||
# Private routines
|
||||
#
|
||||
|
||||
    def _ensure_have_load_only(self) -> None:
        # Guard for mutating operations: editing requires a single target
        # file variant to have been selected at construction time.
        if self.load_only is None:
            raise ConfigurationError("Needed a specific file to be modifying.")
        logger.debug("Will be working with %s variant only", self.load_only)
|
||||
|
||||
    @property
    def _dictionary(self) -> Dict[str, Any]:
        """A dictionary representing the loaded configuration."""
        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
        # are not needed here.
        retval = {}

        # Later variants in OVERRIDE_ORDER override earlier ones
        # (env-var > env > site > user > global).
        for variant in OVERRIDE_ORDER:
            retval.update(self._config[variant])

        return retval
|
||||
|
||||
    def _load_config_files(self) -> None:
        """Loads configuration from configuration files"""
        config_files = dict(self.iter_config_files())
        # Setting PIP_CONFIG_FILE=os.devnull is the documented way to
        # disable config-file loading entirely.
        if config_files[kinds.ENV][0:1] == [os.devnull]:
            logger.debug(
                "Skipping loading configuration files due to "
                "environment's PIP_CONFIG_FILE being os.devnull"
            )
            return

        for variant, files in config_files.items():
            for fname in files:
                # If there's specific variant set in `load_only`, load only
                # that variant, not the others.
                if self.load_only is not None and variant != self.load_only:
                    logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
                    continue

                parser = self._load_file(variant, fname)

                # Keeping track of the parsers used
                self._parsers[variant].append((fname, parser))
|
||||
|
||||
def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
    """Parse *fname* and merge its values into the *variant* bucket.

    Returns the parser so callers can track it for later modification.
    """
    logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
    parser = self._construct_parser(fname)

    for section in parser.sections():
        items = parser.items(section)
        self._config[variant].update(self._normalized_keys(section, items))

    return parser
|
||||
|
||||
def _construct_parser(self, fname: str) -> RawConfigParser:
    """Build a RawConfigParser for *fname*, reading it if it exists.

    Raises:
        ConfigurationFileCouldNotBeLoaded: if the file exists but cannot
            be decoded or parsed.
    """
    parser = configparser.RawConfigParser()
    # If there is no such file, don't bother reading it but create the
    # parser anyway, to hold the data.
    # Doing this is useful when modifying and saving files, where we don't
    # need to construct a parser.
    if os.path.exists(fname):
        # Config files are read with the locale's preferred encoding.
        locale_encoding = locale.getpreferredencoding(False)
        try:
            parser.read(fname, encoding=locale_encoding)
        except UnicodeDecodeError:
            # See https://github.com/pypa/pip/issues/4963
            raise ConfigurationFileCouldNotBeLoaded(
                reason=f"contains invalid {locale_encoding} characters",
                fname=fname,
            )
        except configparser.Error as error:
            # See https://github.com/pypa/pip/issues/4893
            raise ConfigurationFileCouldNotBeLoaded(error=error)
    return parser
|
||||
|
||||
def _load_environment_vars(self) -> None:
    """Loads configuration from environment variables"""
    # Environment-derived values are normalized under the pseudo-section
    # ":env:" so they merge like any other configuration source.
    self._config[kinds.ENV_VAR].update(
        self._normalized_keys(":env:", self.get_environ_vars())
    )
|
||||
|
||||
def _normalized_keys(
    self, section: str, items: Iterable[Tuple[str, Any]]
) -> Dict[str, Any]:
    """Normalizes items to construct a dictionary with normalized keys.

    This routine is where the names become keys and are made the same
    regardless of source - configuration files or environment.
    """
    return {
        f"{section}.{_normalize_name(name)}": value for name, value in items
    }
|
||||
|
||||
def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
    """Returns a generator with all environmental vars with prefix PIP_"""
    prefix = "PIP_"
    for key, value in os.environ.items():
        if not key.startswith(prefix):
            continue
        # Strip the prefix and lowercase to match config-file option names.
        name = key[len(prefix):].lower()
        if name not in ENV_NAMES_IGNORED:
            yield name, value
|
||||
|
||||
# XXX: This is patched in the tests.
def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
    """Yields variant and configuration files associated with it.

    This should be treated like items of a dictionary. The order
    here doesn't affect what gets overridden. That is controlled
    by OVERRIDE_ORDER. However this does control the order they are
    displayed to the user. It's probably most ergonomic to display
    things in the same order as OVERRIDE_ORDER
    """
    # SMELL: Move the conditions out of this function

    env_config_file = os.environ.get("PIP_CONFIG_FILE", None)
    config_files = get_configuration_files()

    yield kinds.GLOBAL, config_files[kinds.GLOBAL]

    # per-user config is not loaded when env_config_file exists
    should_load_user_config = not self.isolated and not (
        env_config_file and os.path.exists(env_config_file)
    )
    if should_load_user_config:
        # The legacy config file is overridden by the new config file
        yield kinds.USER, config_files[kinds.USER]

    # virtualenv config
    yield kinds.SITE, config_files[kinds.SITE]

    # Always yield an ENV entry (possibly empty) so consumers can index
    # config_files[kinds.ENV] unconditionally.
    if env_config_file is not None:
        yield kinds.ENV, [env_config_file]
    else:
        yield kinds.ENV, []
|
||||
|
||||
def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
    """Get values present in a config file (the bucket for *variant*)."""
    return self._config[variant]
|
||||
|
||||
def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
    """Pick the (filename, parser) pair that modifications apply to."""
    # A variant must already have been selected via load_only.
    assert self.load_only
    candidates = self._parsers[self.load_only]
    if not candidates:
        # This should not happen if everything works correctly.
        raise ConfigurationError(
            "Fatal Internal error [id=2]. Please report as a bug."
        )

    # The last parser loaded for this variant has the highest priority.
    return candidates[-1]
|
||||
|
||||
# XXX: This is patched in the tests.
def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
    """Record (fname, parser) as having unsaved changes, at most once."""
    entry = (fname, parser)
    if entry not in self._modified_parsers:
        self._modified_parsers.append(entry)
|
||||
|
||||
def __repr__(self) -> str:
    """Show the class name together with the merged configuration mapping."""
    return f"{type(self).__name__}({self._dictionary!r})"
|
21
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/__init__.py
vendored
Normal file
21
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/__init__.py
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
from pip._internal.distributions.base import AbstractDistribution
|
||||
from pip._internal.distributions.sdist import SourceDistribution
|
||||
from pip._internal.distributions.wheel import WheelDistribution
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
|
||||
|
||||
def make_distribution_for_install_requirement(
    install_req: InstallRequirement,
) -> AbstractDistribution:
    """Returns a Distribution for the given InstallRequirement"""
    # Editable requirements will always be source distributions. They use the
    # legacy logic until we create a modern standard for them.
    if install_req.editable:
        return SourceDistribution(install_req)

    # Wheels can be unpacked directly; everything else is built from source.
    if install_req.is_wheel:
        return WheelDistribution(install_req)
    return SourceDistribution(install_req)
|
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/__pycache__/base.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/__pycache__/base.cpython-313.pyc
vendored
Normal file
Binary file not shown.
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-313.pyc
vendored
Normal file
Binary file not shown.
53
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/base.py
vendored
Normal file
53
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/base.py
vendored
Normal file
@ -0,0 +1,53 @@
|
||||
import abc
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from pip._internal.metadata.base import BaseDistribution
|
||||
from pip._internal.req import InstallRequirement
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
|
||||
|
||||
class AbstractDistribution(metaclass=abc.ABCMeta):
    """A base class for handling installable artifacts.

    The requirements for anything installable are as follows:

    - we must be able to determine the requirement name
      (or we can't correctly handle the non-upgrade case).

    - for packages with setup requirements, we must also be able
      to determine their requirements without installing additional
      packages (for the same reason as run-time dependencies)

    - we must be able to create a Distribution object exposing the
      above metadata.

    - if we need to do work in the build tracker, we must be able to generate a unique
      string to identify the requirement in the build tracker.
    """

    def __init__(self, req: InstallRequirement) -> None:
        super().__init__()
        # The install requirement this distribution wraps.
        self.req = req

    # FIX: ``abc.abstractproperty`` has been deprecated since Python 3.3;
    # the supported spelling is stacking @property over @abc.abstractmethod.
    # Behavior for subclasses (which override with @property) is unchanged.
    @property
    @abc.abstractmethod
    def build_tracker_id(self) -> Optional[str]:
        """A string that uniquely identifies this requirement to the build tracker.

        If None, then this dist has no work to do in the build tracker, and
        ``.prepare_distribution_metadata()`` will not be called."""
        raise NotImplementedError()

    @abc.abstractmethod
    def get_metadata_distribution(self) -> BaseDistribution:
        """Return the metadata-bearing distribution for this artifact."""
        raise NotImplementedError()

    @abc.abstractmethod
    def prepare_distribution_metadata(
        self,
        finder: "PackageFinder",
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        """Do whatever work is needed so that metadata can be produced."""
        raise NotImplementedError()
|
29
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/installed.py
vendored
Normal file
29
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/installed.py
vendored
Normal file
@ -0,0 +1,29 @@
|
||||
from typing import Optional
|
||||
|
||||
from pip._internal.distributions.base import AbstractDistribution
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
from pip._internal.metadata import BaseDistribution
|
||||
|
||||
|
||||
class InstalledDistribution(AbstractDistribution):
    """Represents an installed package.

    This does not need any preparation as the required information has already
    been computed.
    """

    @property
    def build_tracker_id(self) -> Optional[str]:
        # Nothing to build, so no build-tracker work is needed.
        return None

    def get_metadata_distribution(self) -> BaseDistribution:
        """Return the already-installed distribution backing this requirement."""
        assert self.req.satisfied_by is not None, "not actually installed"
        return self.req.satisfied_by

    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        # No-op: metadata already exists for installed packages.
        pass
|
158
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/sdist.py
vendored
Normal file
158
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/sdist.py
vendored
Normal file
@ -0,0 +1,158 @@
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Iterable, Optional, Set, Tuple
|
||||
|
||||
from pip._internal.build_env import BuildEnvironment
|
||||
from pip._internal.distributions.base import AbstractDistribution
|
||||
from pip._internal.exceptions import InstallationError
|
||||
from pip._internal.metadata import BaseDistribution
|
||||
from pip._internal.utils.subprocess import runner_with_spinner_message
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SourceDistribution(AbstractDistribution):
    """Represents a source distribution.

    The preparation step for these needs metadata for the packages to be
    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
    """

    @property
    def build_tracker_id(self) -> Optional[str]:
        """Identify this requirement uniquely by its link."""
        assert self.req.link
        return self.req.link.url_without_fragment

    def get_metadata_distribution(self) -> BaseDistribution:
        # Metadata was generated by prepare_distribution_metadata() below.
        return self.req.get_dist()

    def prepare_distribution_metadata(
        self,
        finder: "PackageFinder",
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        """Generate the sdist's metadata, using build isolation if requested."""
        # Load pyproject.toml, to determine whether PEP 517 is to be used
        self.req.load_pyproject_toml()

        # Set up the build isolation, if this requirement should be isolated
        should_isolate = self.req.use_pep517 and build_isolation
        if should_isolate:
            # Setup an isolated environment and install the build backend static
            # requirements in it.
            self._prepare_build_backend(finder)
            # Check that if the requirement is editable, it either supports PEP 660 or
            # has a setup.py or a setup.cfg. This cannot be done earlier because we need
            # to setup the build backend to verify it supports build_editable, nor can
            # it be done later, because we want to avoid installing build requirements
            # needlessly. Doing it here also works around setuptools generating
            # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
            # without setup.py nor setup.cfg.
            self.req.isolated_editable_sanity_check()
            # Install the dynamic build requirements.
            self._install_build_reqs(finder)
        # Check if the current environment provides build dependencies
        should_check_deps = self.req.use_pep517 and check_build_deps
        if should_check_deps:
            pyproject_requires = self.req.pyproject_requires
            assert pyproject_requires is not None
            conflicting, missing = self.req.build_env.check_requirements(
                pyproject_requires
            )
            if conflicting:
                self._raise_conflicts("the backend dependencies", conflicting)
            if missing:
                self._raise_missing_reqs(missing)
        self.req.prepare_metadata()

    def _prepare_build_backend(self, finder: "PackageFinder") -> None:
        """Create the isolated build env and install static build requirements."""
        # Isolate in a BuildEnvironment and install the build-time
        # requirements.
        pyproject_requires = self.req.pyproject_requires
        assert pyproject_requires is not None

        self.req.build_env = BuildEnvironment()
        self.req.build_env.install_requirements(
            finder, pyproject_requires, "overlay", kind="build dependencies"
        )
        conflicting, missing = self.req.build_env.check_requirements(
            self.req.requirements_to_check
        )
        if conflicting:
            self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
        if missing:
            # Missing here is best-effort: warn rather than fail, since the
            # project may still build without an explicit backend.
            logger.warning(
                "Missing build requirements in pyproject.toml for %s.",
                self.req,
            )
            logger.warning(
                "The project does not specify a build backend, and "
                "pip cannot fall back to setuptools without %s.",
                " and ".join(map(repr, sorted(missing))),
            )

    def _get_build_requires_wheel(self) -> Iterable[str]:
        """Ask the PEP 517 backend for extra wheel-build requirements."""
        with self.req.build_env:
            runner = runner_with_spinner_message("Getting requirements to build wheel")
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(runner):
                return backend.get_requires_for_build_wheel()

    def _get_build_requires_editable(self) -> Iterable[str]:
        """Ask the PEP 660 backend for extra editable-build requirements."""
        with self.req.build_env:
            runner = runner_with_spinner_message(
                "Getting requirements to build editable"
            )
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(runner):
                return backend.get_requires_for_build_editable()

    def _install_build_reqs(self, finder: "PackageFinder") -> None:
        # Install any extra build dependencies that the backend requests.
        # This must be done in a second pass, as the pyproject.toml
        # dependencies must be installed before we can call the backend.
        if (
            self.req.editable
            and self.req.permit_editable_wheels
            and self.req.supports_pyproject_editable
        ):
            build_reqs = self._get_build_requires_editable()
        else:
            build_reqs = self._get_build_requires_wheel()
        conflicting, missing = self.req.build_env.check_requirements(build_reqs)
        if conflicting:
            self._raise_conflicts("the backend dependencies", conflicting)
        self.req.build_env.install_requirements(
            finder, missing, "normal", kind="backend dependencies"
        )

    def _raise_conflicts(
        self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
    ) -> None:
        """Raise InstallationError describing conflicting build dependencies."""
        format_string = (
            "Some build dependencies for {requirement} "
            "conflict with {conflicting_with}: {description}."
        )
        error_message = format_string.format(
            requirement=self.req,
            conflicting_with=conflicting_with,
            description=", ".join(
                f"{installed} is incompatible with {wanted}"
                for installed, wanted in sorted(conflicting_reqs)
            ),
        )
        raise InstallationError(error_message)

    def _raise_missing_reqs(self, missing: Set[str]) -> None:
        """Raise InstallationError listing missing build dependencies."""
        format_string = (
            "Some build dependencies for {requirement} are missing: {missing}."
        )
        error_message = format_string.format(
            requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
        )
        raise InstallationError(error_message)
|
42
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/wheel.py
vendored
Normal file
42
Dependencies/Python/Lib/site-packages/pip/_internal/distributions/wheel.py
vendored
Normal file
@ -0,0 +1,42 @@
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
|
||||
from pip._internal.distributions.base import AbstractDistribution
|
||||
from pip._internal.metadata import (
|
||||
BaseDistribution,
|
||||
FilesystemWheel,
|
||||
get_wheel_distribution,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
|
||||
|
||||
class WheelDistribution(AbstractDistribution):
    """Represents a wheel distribution.

    This does not need any preparation as wheels can be directly unpacked.
    """

    @property
    def build_tracker_id(self) -> Optional[str]:
        # Nothing to build, so no build-tracker work is needed.
        return None

    def get_metadata_distribution(self) -> BaseDistribution:
        """Loads the metadata from the wheel file into memory and returns a
        Distribution that uses it, not relying on the wheel file or
        requirement.
        """
        assert self.req.local_file_path, "Set as part of preparation during download"
        assert self.req.name, "Wheels are never unnamed"
        wheel = FilesystemWheel(self.req.local_file_path)
        return get_wheel_distribution(wheel, canonicalize_name(self.req.name))

    def prepare_distribution_metadata(
        self,
        finder: "PackageFinder",
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        # No-op: wheel metadata is read directly from the archive.
        pass
|
809
Dependencies/Python/Lib/site-packages/pip/_internal/exceptions.py
vendored
Normal file
809
Dependencies/Python/Lib/site-packages/pip/_internal/exceptions.py
vendored
Normal file
@ -0,0 +1,809 @@
|
||||
"""Exceptions used throughout package.
|
||||
|
||||
This module MUST NOT try to import from anything within `pip._internal` to
|
||||
operate. This is expected to be importable from any/all files within the
|
||||
subpackage and, thus, should not depend on them.
|
||||
"""
|
||||
|
||||
import configparser
|
||||
import contextlib
|
||||
import locale
|
||||
import logging
|
||||
import pathlib
|
||||
import re
|
||||
import sys
|
||||
from itertools import chain, groupby, repeat
|
||||
from typing import TYPE_CHECKING, Dict, Iterator, List, Literal, Optional, Union
|
||||
|
||||
from pip._vendor.packaging.requirements import InvalidRequirement
|
||||
from pip._vendor.packaging.version import InvalidVersion
|
||||
from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult
|
||||
from pip._vendor.rich.markup import escape
|
||||
from pip._vendor.rich.text import Text
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from hashlib import _Hash
|
||||
|
||||
from pip._vendor.requests.models import Request, Response
|
||||
|
||||
from pip._internal.metadata import BaseDistribution
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
#
|
||||
# Scaffolding
|
||||
#
|
||||
def _is_kebab_case(s: str) -> bool:
    """Return True when *s* is lowercase kebab-case (e.g. ``"some-error-ref"``)."""
    return bool(re.match(r"^[a-z]+(-[a-z]+)*$", s))
|
||||
|
||||
|
||||
def _prefix_with_indent(
    s: Union[Text, str],
    console: Console,
    *,
    prefix: str,
    indent: str,
) -> Text:
    """Render *s* with *prefix* on the first line and *indent* on later lines."""
    text = s if isinstance(s, Text) else console.render_str(s)

    rendered_prefix = console.render_str(prefix, overflow="ignore")
    line_joiner = console.render_str(f"\n{indent}", overflow="ignore")
    return rendered_prefix + line_joiner.join(text.split(allow_blank=True))
|
||||
|
||||
|
||||
class PipError(Exception):
    """The base pip error; all pip-internal exceptions derive from it."""
|
||||
|
||||
|
||||
class DiagnosticPipError(PipError):
    """An error, that presents diagnostic information to the user.

    This contains a bunch of logic, to enable pretty presentation of our error
    messages. Each error gets a unique reference. Each error can also include
    additional context, a hint and/or a note -- which are presented with the
    main error message in a consistent style.

    This is adapted from the error output styling in `sphinx-theme-builder`.
    """

    # Subclasses may set this as a class attribute instead of passing it in.
    reference: str

    def __init__(
        self,
        *,
        kind: 'Literal["error", "warning"]' = "error",
        reference: Optional[str] = None,
        message: Union[str, Text],
        context: Optional[Union[str, Text]],
        hint_stmt: Optional[Union[str, Text]],
        note_stmt: Optional[Union[str, Text]] = None,
        link: Optional[str] = None,
    ) -> None:
        # Ensure a proper reference is provided.
        if reference is None:
            assert hasattr(self, "reference"), "error reference not provided!"
            reference = self.reference
        assert _is_kebab_case(reference), "error reference must be kebab-case!"

        self.kind = kind
        self.reference = reference

        self.message = message
        self.context = context

        self.note_stmt = note_stmt
        self.hint_stmt = hint_stmt

        self.link = link

        super().__init__(f"<{self.__class__.__name__}: {self.reference}>")

    def __repr__(self) -> str:
        return (
            f"<{self.__class__.__name__}("
            f"reference={self.reference!r}, "
            f"message={self.message!r}, "
            f"context={self.context!r}, "
            f"note_stmt={self.note_stmt!r}, "
            f"hint_stmt={self.hint_stmt!r}"
            ")>"
        )

    def __rich_console__(
        self,
        console: Console,
        options: ConsoleOptions,
    ) -> RenderResult:
        # "error" renders red, "warning" renders yellow.
        colour = "red" if self.kind == "error" else "yellow"

        yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]"
        yield ""

        if not options.ascii_only:
            # Present the main message, with relevant context indented.
            if self.context is not None:
                yield _prefix_with_indent(
                    self.message,
                    console,
                    prefix=f"[{colour}]×[/] ",
                    indent=f"[{colour}]│[/] ",
                )
                yield _prefix_with_indent(
                    self.context,
                    console,
                    prefix=f"[{colour}]╰─>[/] ",
                    indent=f"[{colour}] [/] ",
                )
            else:
                # FIX: use the kind-derived colour here too (this branch
                # previously hardcoded "[red]×[/] ", which mis-coloured the
                # marker for kind="warning").
                yield _prefix_with_indent(
                    self.message,
                    console,
                    prefix=f"[{colour}]×[/] ",
                    indent=" ",
                )
        else:
            # Plain-ASCII fallback: no markers, just the text.
            yield self.message
            if self.context is not None:
                yield ""
                yield self.context

        if self.note_stmt is not None or self.hint_stmt is not None:
            yield ""

        if self.note_stmt is not None:
            yield _prefix_with_indent(
                self.note_stmt,
                console,
                prefix="[magenta bold]note[/]: ",
                indent=" ",
            )
        if self.hint_stmt is not None:
            yield _prefix_with_indent(
                self.hint_stmt,
                console,
                prefix="[cyan bold]hint[/]: ",
                indent=" ",
            )

        if self.link is not None:
            yield ""
            yield f"Link: {self.link}"
|
||||
|
||||
|
||||
#
|
||||
# Actual Errors
|
||||
#
|
||||
class ConfigurationError(PipError):
    """General exception in configuration (reading/writing pip config)."""
|
||||
|
||||
|
||||
class InstallationError(PipError):
    """General exception during installation."""
|
||||
|
||||
|
||||
class MissingPyProjectBuildRequires(DiagnosticPipError):
    """Raised when pyproject.toml has `build-system`, but no `build-system.requires`."""

    reference = "missing-pyproject-build-system-requires"

    def __init__(self, *, package: str) -> None:
        context = Text(
            "This package has an invalid pyproject.toml file.\n"
            "The [build-system] table is missing the mandatory `requires` key."
        )
        super().__init__(
            message=f"Can not process {escape(package)}",
            context=context,
            note_stmt="This is an issue with the package mentioned above, not pip.",
            hint_stmt=Text("See PEP 518 for the detailed specification."),
        )
|
||||
|
||||
|
||||
class InvalidPyProjectBuildRequires(DiagnosticPipError):
    """Raised when pyproject.toml an invalid `build-system.requires`."""

    reference = "invalid-pyproject-build-system-requires"

    def __init__(self, *, package: str, reason: str) -> None:
        context = Text(
            "This package has an invalid `build-system.requires` key in "
            f"pyproject.toml.\n{reason}"
        )
        super().__init__(
            message=f"Can not process {escape(package)}",
            context=context,
            note_stmt="This is an issue with the package mentioned above, not pip.",
            hint_stmt=Text("See PEP 518 for the detailed specification."),
        )
|
||||
|
||||
|
||||
class NoneMetadataError(PipError):
    """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".

    This signifies an inconsistency, when the Distribution claims to have
    the metadata file (if not, raise ``FileNotFoundError`` instead), but is
    not actually able to produce its content. This may be due to permission
    errors.
    """

    def __init__(
        self,
        dist: "BaseDistribution",
        metadata_name: str,
    ) -> None:
        """
        :param dist: A Distribution object.
        :param metadata_name: The name of the metadata being accessed
            (can be "METADATA" or "PKG-INFO").
        """
        self.dist = dist
        self.metadata_name = metadata_name

    def __str__(self) -> str:
        # Use `dist` in the error message because its stringification
        # includes more information, like the version and location.
        return f"None {self.metadata_name} metadata found for distribution: {self.dist}"
|
||||
|
||||
|
||||
class UserInstallationInvalid(InstallationError):
    """A --user install is requested on an environment without user site."""

    def __str__(self) -> str:
        return "User base directory is not specified"
|
||||
|
||||
|
||||
class InvalidSchemeCombination(InstallationError):
    """Raised when mutually-exclusive install-scheme options are combined."""

    def __str__(self) -> str:
        # All args but the last are joined; the last is singled out.
        others = ", ".join(str(arg) for arg in self.args[:-1])
        return f"Cannot set {others} and {self.args[-1]} together"
|
||||
|
||||
|
||||
class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement."""
|
||||
|
||||
|
||||
class RequirementsFileParseError(InstallationError):
    """Raised when a general error occurs parsing a requirements file line."""
|
||||
|
||||
|
||||
class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already
    installed."""
|
||||
|
||||
|
||||
class BadCommand(PipError):
    """Raised when virtualenv or a command (e.g. a VCS binary) is not found."""
|
||||
|
||||
|
||||
class CommandError(PipError):
    """Raised when there is an error in command-line arguments."""
|
||||
|
||||
|
||||
class PreviousBuildDirError(PipError):
    """Raised when there's a previous conflicting build directory."""
|
||||
|
||||
|
||||
class NetworkConnectionError(PipError):
    """HTTP connection error"""

    def __init__(
        self,
        error_msg: str,
        response: Optional["Response"] = None,
        request: Optional["Request"] = None,
    ) -> None:
        """
        Initialize NetworkConnectionError with `request` and `response`
        objects.
        """
        self.response = response
        self.request = request
        self.error_msg = error_msg
        # Backfill the request from the response when the caller supplied a
        # response but no request (requests attaches the originating request
        # to the response object).
        if (
            self.response is not None
            and not self.request
            and hasattr(response, "request")
        ):
            self.request = self.response.request
        super().__init__(error_msg, response, request)

    def __str__(self) -> str:
        return str(self.error_msg)
|
||||
|
||||
|
||||
class InvalidWheelFilename(InstallationError):
    """Invalid wheel filename."""
|
||||
|
||||
|
||||
class UnsupportedWheel(InstallationError):
    """Unsupported wheel."""
|
||||
|
||||
|
||||
class InvalidWheel(InstallationError):
    """Invalid (e.g. corrupt) wheel."""

    def __init__(self, location: str, name: str):
        # location: where the wheel lives; name: the wheel's filename.
        self.location = location
        self.name = name

    def __str__(self) -> str:
        return f"Wheel '{self.name}' located at {self.location} is invalid."
|
||||
|
||||
|
||||
class MetadataInconsistent(InstallationError):
    """Built metadata contains inconsistent information.

    This is raised when the metadata contains values (e.g. name and version)
    that do not match the information previously obtained from sdist filename,
    user-supplied ``#egg=`` value, or an install requirement name.
    """

    def __init__(
        self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str
    ) -> None:
        # field: metadata field name; f_val: value expected from the
        # requirement; m_val: value actually found in generated metadata.
        self.ireq = ireq
        self.field = field
        self.f_val = f_val
        self.m_val = m_val

    def __str__(self) -> str:
        return (
            f"Requested {self.ireq} has inconsistent {self.field}: "
            f"expected {self.f_val!r}, but metadata has {self.m_val!r}"
        )
|
||||
|
||||
|
||||
class MetadataInvalid(InstallationError):
    """Metadata is invalid."""

    def __init__(self, ireq: "InstallRequirement", error: str) -> None:
        self.ireq = ireq
        self.error = error

    def __str__(self) -> str:
        return f"Requested {self.ireq} has invalid metadata: {self.error}"
|
||||
|
||||
|
||||
class InstallationSubprocessError(DiagnosticPipError, InstallationError):
    """A subprocess call failed."""

    reference = "subprocess-exited-with-error"

    def __init__(
        self,
        *,
        command_description: str,
        exit_code: int,
        output_lines: Optional[List[str]],
    ) -> None:
        # When output was not captured, point the user at the log above;
        # otherwise embed the captured output into the rendered context.
        if output_lines is None:
            output_prompt = Text("See above for output.")
        else:
            output_prompt = (
                Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n")
                + Text("".join(output_lines))
                + Text.from_markup(R"[red]\[end of output][/]")
            )

        super().__init__(
            message=(
                f"[green]{escape(command_description)}[/] did not run successfully.\n"
                f"exit code: {exit_code}"
            ),
            context=output_prompt,
            hint_stmt=None,
            note_stmt=(
                "This error originates from a subprocess, and is likely not a "
                "problem with pip."
            ),
        )

        self.command_description = command_description
        self.exit_code = exit_code

    def __str__(self) -> str:
        return f"{self.command_description} exited with {self.exit_code}"
|
||||
|
||||
|
||||
class MetadataGenerationFailed(InstallationSubprocessError, InstallationError):
    # Raised when a build backend fails while producing package metadata.
    reference = "metadata-generation-failed"

    def __init__(
        self,
        *,
        package_details: str,
    ) -> None:
        # NOTE: deliberately skips InstallationSubprocessError.__init__ (which
        # requires a command description / exit code) and calls
        # DiagnosticPipError.__init__ directly via the explicit super() form.
        super(InstallationSubprocessError, self).__init__(
            message="Encountered error while generating package metadata.",
            context=escape(package_details),
            hint_stmt="See above for details.",
            note_stmt="This is an issue with the package mentioned above, not pip.",
        )

    def __str__(self) -> str:
        return "metadata generation failed"
|
||||
|
||||
|
||||
class HashErrors(InstallationError):
    """Aggregates multiple HashError instances into one report."""

    def __init__(self) -> None:
        # Collected HashError instances, appended as verification proceeds.
        self.errors: List[HashError] = []

    def append(self, error: "HashError") -> None:
        self.errors.append(error)

    def __str__(self) -> str:
        # Sort by recovery difficulty, then group by concrete class so each
        # class's heading appears once above its members.
        self.errors.sort(key=lambda e: e.order)
        lines: List[str] = []
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            lines.extend(e.body() for e in errors_of_cls)
        return "\n".join(lines) if lines else ""

    def __bool__(self) -> bool:
        # Truthy exactly when at least one error has been collected.
        return bool(self.errors)
class HashError(InstallationError):
    """Base class for failures to verify a package against known-good hashes.

    :cvar order: An int that sorts hash exception classes by difficulty of
        recovery (lower being harder), so users are not distracted by
        unpinned packages while deeper issues (like VCS dependencies)
        remain. Also keeps error reports in a deterministic order.
    :cvar head: A section heading displayed above potentially many
        exceptions of this kind.
    :ivar req: The InstallRequirement that triggered this error. It is
        attached after the exception is instantiated, because it is not
        typically available earlier.
    """

    req: Optional["InstallRequirement"] = None
    head = ""
    order: int = -1

    def body(self) -> str:
        """Return a summary line for display under the heading.

        The default implementation simply describes the triggering
        requirement (whose link has been populated by the resolver's
        _populate_link()).
        """
        return f" {self._requirement_name()}"

    def __str__(self) -> str:
        return f"{self.head}\n{self.body()}"

    def _requirement_name(self) -> str:
        """Describe the requirement that triggered this error.

        Falls back to "unknown package" when no requirement was attached.
        """
        return str(self.req) if self.req else "unknown package"
class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement,
    but there is no way to hash a VCS checkout."""

    order = 0
    head = (
        "Can't verify hashes for these requirements because we don't "
        "have a way to hash version control repositories:"
    )
class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a ``file://`` requirement that points to a
    directory, but we don't have a method for hashing directory contents.

    (The docstring previously duplicated VcsHashUnsupported's text and
    incorrectly described VCS requirements; the ``head`` message below shows
    the real condition.)
    """

    order = 1
    head = (
        "Can't verify hashes for these file:// requirements because they "
        "point to directories:"
    )
class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = (
        "Hashes are required in --require-hashes mode, but they are "
        "missing from some requirements. Here is a list of those "
        "requirements along with the hashes their downloaded archives "
        "actually had. Add lines like these to your requirements files to "
        "prevent tampering. (If you did not enable --require-hashes "
        "manually, note that it turns on automatically when any package "
        "has a hash.)"
    )

    def __init__(self, gotten_hash: str) -> None:
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self) -> str:
        # Imported here to dodge a circular import.
        from pip._internal.utils.hashes import FAVORITE_HASH

        package = None
        if self.req:
            if self.req.is_direct:
                # For URL-based requirements, show the original URL from the
                # requirements file rather than the package name, so the
                # output can be pasted straight back into that file.
                package = self.req.original_link
            else:
                # Guard against junk fed to InstallRequirement's constructor.
                package = getattr(self.req, "req", None)
        shown = package or "unknown package"
        return f" {shown} --hash={FAVORITE_HASH}:{self.gotten_hash}"
class HashUnpinned(HashError):
    """A requirement carried a hash but was not pinned to an exact
    version."""

    order = 3
    head = (
        "In --require-hashes mode, all requirements must have their "
        "versions pinned with ==. These do not:"
    )
class HashMismatch(HashError):
    """Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. May be written to after the exception is raised to
        improve its error message.
    """

    order = 4
    head = (
        "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
        "FILE. If you have updated the package versions, please update "
        "the hashes. Otherwise, examine the package contents carefully; "
        "someone may have tampered with them."
    )

    def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None:
        """
        :param allowed: A dict of algorithm names pointing to lists of
            allowed hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self) -> str:
        return f" {self._requirement_name()}:\n{self._hash_comparison()}"

    def _hash_comparison(self) -> str:
        """Return a comparison of actual and expected hash values.

        Example::

            Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                or 123451234512345123451234512345123451234512345
            Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
        """

        def hash_then_or(hash_name: str) -> "chain[str]":
            # The decent hashes all have 6-char names, so the space literal
            # can be hard-coded.
            return chain([hash_name], repeat(" or"))

        lines: List[str] = []
        for hash_name, expecteds in self.allowed.items():
            prefix = hash_then_or(hash_name)
            for expected in expecteds:
                lines.append(f" Expected {next(prefix)} {expected}")
            lines.append(f" Got {self.gots[hash_name].hexdigest()}\n")
        return "\n".join(lines)
class UnsupportedPythonVersion(InstallationError):
    """The running interpreter does not satisfy a package's
    Requires-Python metadata."""
class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
    """Raised when a configuration file cannot be read or parsed."""

    def __init__(
        self,
        reason: str = "could not be loaded",
        fname: Optional[str] = None,
        error: Optional[configparser.Error] = None,
    ) -> None:
        super().__init__(error)
        self.reason = reason
        self.fname = fname
        self.error = error

    def __str__(self) -> str:
        # Prefer naming the file; otherwise the parser error itself must be
        # available to explain the failure.
        if self.fname is None:
            assert self.error is not None
            message_part = f".\n{self.error}\n"
        else:
            message_part = f" in {self.fname}."
        return f"Configuration file {self.reason}{message_part}"
# Fallback PEP 668 message, shown when the EXTERNALLY-MANAGED marker file
# supplies no vendor-specific error text. NOTE: this f-string is evaluated
# at import time, so it captures sys.prefix of the running interpreter.
_DEFAULT_EXTERNALLY_MANAGED_ERROR = f"""\
The Python environment under {sys.prefix} is managed externally, and may not be
manipulated by the user. Please use specific tooling from the distributor of
the Python installation to interact with this environment instead.
"""
class ExternallyManagedEnvironment(DiagnosticPipError):
    """The current environment is externally managed.

    This is raised when the current environment is externally managed, as
    defined by `PEP 668`_. The ``EXTERNALLY-MANAGED`` configuration is checked
    and displayed when the error is bubbled up to the user.

    :param error: The error message read from ``EXTERNALLY-MANAGED``.
    """

    reference = "externally-managed-environment"

    def __init__(self, error: Optional[str]) -> None:
        # A None error means the marker file carried no usable message;
        # fall back to the generic module-level text.
        if error is None:
            context = Text(_DEFAULT_EXTERNALLY_MANAGED_ERROR)
        else:
            context = Text(error)
        super().__init__(
            message="This environment is externally managed",
            context=context,
            note_stmt=(
                "If you believe this is a mistake, please contact your "
                "Python installation or OS distribution provider. "
                "You can override this, at the risk of breaking your Python "
                "installation or OS, by passing --break-system-packages."
            ),
            hint_stmt=Text("See PEP 668 for the detailed specification."),
        )

    @staticmethod
    def _iter_externally_managed_error_keys() -> Iterator[str]:
        # Yield config keys from most to least specific: "Error-<locale>",
        # then "Error-<language>", then the plain "Error" fallback.
        #
        # LC_MESSAGES is in POSIX, but not the C standard. The most common
        # platform that does not implement this category is Windows, where
        # using other categories for console message localization is equally
        # unreliable, so we fall back to the locale-less vendor message. This
        # can always be re-evaluated when a vendor proposes a new alternative.
        try:
            category = locale.LC_MESSAGES
        except AttributeError:
            lang: Optional[str] = None
        else:
            lang, _ = locale.getlocale(category)
        if lang is not None:
            yield f"Error-{lang}"
            # Also try the bare language code, e.g. "en" from "en_US" or
            # "en-GB" (both separators appear in the wild).
            for sep in ("-", "_"):
                before, found, _ = lang.partition(sep)
                if not found:
                    continue
                yield f"Error-{before}"
        yield "Error"

    @classmethod
    def from_config(
        cls,
        config: Union[pathlib.Path, str],
    ) -> "ExternallyManagedEnvironment":
        """Build the exception from an ``EXTERNALLY-MANAGED`` marker file,
        preferring the most locale-specific error message available."""
        parser = configparser.ConfigParser(interpolation=None)
        try:
            parser.read(config, encoding="utf-8")
            section = parser["externally-managed"]
            for key in cls._iter_externally_managed_error_keys():
                with contextlib.suppress(KeyError):
                    return cls(section[key])
        except KeyError:
            # No [externally-managed] section: use the default message.
            pass
        except (OSError, UnicodeDecodeError, configparser.ParsingError):
            from pip._internal.utils._log import VERBOSE

            # Attach the traceback only when verbose logging is enabled.
            exc_info = logger.isEnabledFor(VERBOSE)
            logger.warning("Failed to read %s", config, exc_info=exc_info)
        return cls(None)
class UninstallMissingRecord(DiagnosticPipError):
    """Raised when a distribution cannot be uninstalled because no RECORD
    file lists its installed files."""

    reference = "uninstall-no-record-file"

    def __init__(self, *, distribution: "BaseDistribution") -> None:
        installer = distribution.installer
        if installer and installer != "pip":
            # Another tool installed it; pip cannot safely guess the files.
            hint = Text(
                f"The package was installed by {installer}. "
                "You should check if it can uninstall the package."
            )
        else:
            # pip (or an unknown installer) put it there; a force-reinstall
            # can regenerate the RECORD file.
            dep = f"{distribution.raw_name}=={distribution.version}"
            hint = Text.assemble(
                "You might be able to recover from this via: ",
                (f"pip install --force-reinstall --no-deps {dep}", "green"),
            )

        super().__init__(
            message=Text(f"Cannot uninstall {distribution}"),
            context=(
                "The package's contents are unknown: "
                f"no RECORD file was found for {distribution.raw_name}."
            ),
            hint_stmt=hint,
        )
class LegacyDistutilsInstall(DiagnosticPipError):
    """Raised when asked to uninstall a distutils-installed project, whose
    owned files cannot be determined reliably."""

    reference = "uninstall-distutils-installed-package"

    def __init__(self, *, distribution: "BaseDistribution") -> None:
        explanation = (
            "It is a distutils installed project and thus we cannot accurately "
            "determine which files belong to it which would lead to only a partial "
            "uninstall."
        )
        super().__init__(
            message=Text(f"Cannot uninstall {distribution}"),
            context=explanation,
            hint_stmt=None,
        )
class InvalidInstalledPackage(DiagnosticPipError):
    """An installed package has a requirement or version string that pip
    can no longer parse."""

    reference = "invalid-installed-package"

    def __init__(
        self,
        *,
        dist: "BaseDistribution",
        invalid_exc: Union[InvalidRequirement, InvalidVersion],
    ) -> None:
        installed_location = dist.installed_location

        # The exception type tells us which piece of metadata is broken.
        invalid_type = (
            "requirement" if isinstance(invalid_exc, InvalidRequirement) else "version"
        )
        location_clause = f"in {installed_location!r} " if installed_location else ""

        super().__init__(
            message=Text(
                f"Cannot process installed package {dist} "
                + location_clause
                + f"because it has an invalid {invalid_type}:\n{invalid_exc.args[0]}"
            ),
            context=(
                "Starting with pip 24.1, packages with invalid "
                f"{invalid_type}s can not be processed."
            ),
            hint_stmt="To proceed this package must be uninstalled.",
        )
2
Dependencies/Python/Lib/site-packages/pip/_internal/index/__init__.py
vendored
Normal file
2
Dependencies/Python/Lib/site-packages/pip/_internal/index/__init__.py
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
"""Index interaction code
|
||||
"""
|
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/index/__pycache__/__init__.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/index/__pycache__/__init__.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/index/__pycache__/collector.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/index/__pycache__/collector.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/index/__pycache__/sources.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/index/__pycache__/sources.cpython-313.pyc
vendored
Normal file
Binary file not shown.
494
Dependencies/Python/Lib/site-packages/pip/_internal/index/collector.py
vendored
Normal file
494
Dependencies/Python/Lib/site-packages/pip/_internal/index/collector.py
vendored
Normal file
@ -0,0 +1,494 @@
|
||||
"""
|
||||
The main purpose of this module is to expose LinkCollector.collect_sources().
|
||||
"""
|
||||
|
||||
import collections
|
||||
import email.message
|
||||
import functools
|
||||
import itertools
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from dataclasses import dataclass
|
||||
from html.parser import HTMLParser
|
||||
from optparse import Values
|
||||
from typing import (
|
||||
Callable,
|
||||
Dict,
|
||||
Iterable,
|
||||
List,
|
||||
MutableMapping,
|
||||
NamedTuple,
|
||||
Optional,
|
||||
Protocol,
|
||||
Sequence,
|
||||
Tuple,
|
||||
Union,
|
||||
)
|
||||
|
||||
from pip._vendor import requests
|
||||
from pip._vendor.requests import Response
|
||||
from pip._vendor.requests.exceptions import RetryError, SSLError
|
||||
|
||||
from pip._internal.exceptions import NetworkConnectionError
|
||||
from pip._internal.models.link import Link
|
||||
from pip._internal.models.search_scope import SearchScope
|
||||
from pip._internal.network.session import PipSession
|
||||
from pip._internal.network.utils import raise_for_status
|
||||
from pip._internal.utils.filetypes import is_archive_file
|
||||
from pip._internal.utils.misc import redact_auth_from_url
|
||||
from pip._internal.vcs import vcs
|
||||
|
||||
from .sources import CandidatesFromPage, LinkSource, build_source
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
ResponseHeaders = MutableMapping[str, str]
|
||||
|
||||
|
||||
def _match_vcs_scheme(url: str) -> Optional[str]:
    """Look for VCS schemes in the URL.

    Returns the matched VCS scheme, or None if there's no match.
    """
    lowered = url.lower()
    for scheme in vcs.schemes:
        # Length guard fixes a latent IndexError: a URL that is exactly a
        # scheme name (e.g. "git") would previously index past the end.
        if (
            lowered.startswith(scheme)
            and len(url) > len(scheme)
            and url[len(scheme)] in "+:"
        ):
            return scheme
    return None
class _NotAPIContent(Exception):
|
||||
def __init__(self, content_type: str, request_desc: str) -> None:
|
||||
super().__init__(content_type, request_desc)
|
||||
self.content_type = content_type
|
||||
self.request_desc = request_desc
|
||||
|
||||
|
||||
def _ensure_api_header(response: Response) -> None:
|
||||
"""
|
||||
Check the Content-Type header to ensure the response contains a Simple
|
||||
API Response.
|
||||
|
||||
Raises `_NotAPIContent` if the content type is not a valid content-type.
|
||||
"""
|
||||
content_type = response.headers.get("Content-Type", "Unknown")
|
||||
|
||||
content_type_l = content_type.lower()
|
||||
if content_type_l.startswith(
|
||||
(
|
||||
"text/html",
|
||||
"application/vnd.pypi.simple.v1+html",
|
||||
"application/vnd.pypi.simple.v1+json",
|
||||
)
|
||||
):
|
||||
return
|
||||
|
||||
raise _NotAPIContent(content_type, response.request.method)
|
||||
|
||||
|
||||
class _NotHTTP(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def _ensure_api_response(url: str, session: PipSession) -> None:
    """
    Send a HEAD request to the URL, and ensure the response contains a simple
    API Response.

    Raises `_NotHTTP` if the URL is not available for a HEAD request, or
    `_NotAPIContent` if the content type is not a valid content type.
    """
    scheme = urllib.parse.urlsplit(url)[0]
    if scheme not in {"http", "https"}:
        raise _NotHTTP()

    resp = session.head(url, allow_redirects=True)
    raise_for_status(resp)
    _ensure_api_header(resp)
def _get_simple_response(url: str, session: PipSession) -> Response:
    """Access an Simple API response with GET, and return the response.

    This consists of three parts:

    1. If the URL looks suspiciously like an archive, send a HEAD first to
       check the Content-Type is HTML or Simple API, to avoid downloading a
       large file. Raise `_NotHTTP` if the content type cannot be determined, or
       `_NotAPIContent` if it is not HTML or a Simple API.
    2. Actually perform the request. Raise HTTP exceptions on network failures.
    3. Check the Content-Type header to make sure we got a Simple API response,
       and raise `_NotAPIContent` otherwise.
    """
    # Cheap HEAD-based pre-check only when the URL's filename looks like an
    # archive; otherwise go straight to the GET.
    if is_archive_file(Link(url).filename):
        _ensure_api_response(url, session=session)

    logger.debug("Getting page %s", redact_auth_from_url(url))

    resp = session.get(
        url,
        headers={
            # Content negotiation: prefer the PEP 691 JSON form, then the
            # v1 HTML form, and plain HTML only as a last resort.
            "Accept": ", ".join(
                [
                    "application/vnd.pypi.simple.v1+json",
                    "application/vnd.pypi.simple.v1+html; q=0.1",
                    "text/html; q=0.01",
                ]
            ),
            # We don't want to blindly returned cached data for
            # /simple/, because authors generally expecting that
            # twine upload && pip install will function, but if
            # they've done a pip install in the last ~10 minutes
            # it won't. Thus by setting this to zero we will not
            # blindly use any cached data, however the benefit of
            # using max-age=0 instead of no-cache, is that we will
            # still support conditional requests, so we will still
            # minimize traffic sent in cases where the page hasn't
            # changed at all, we will just always incur the round
            # trip for the conditional GET now instead of only
            # once per 10 minutes.
            # For more information, please see pypa/pip#5670.
            "Cache-Control": "max-age=0",
        },
    )
    raise_for_status(resp)

    # The check for archives above only works if the url ends with
    # something that looks like an archive. However that is not a
    # requirement of an url. Unless we issue a HEAD request on every
    # url we cannot know ahead of time for sure if something is a
    # Simple API response or not. However we can check after we've
    # downloaded it.
    _ensure_api_header(resp)

    logger.debug(
        "Fetched page %s as %s",
        redact_auth_from_url(url),
        resp.headers.get("Content-Type", "Unknown"),
    )

    return resp
def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
|
||||
"""Determine if we have any encoding information in our headers."""
|
||||
if headers and "Content-Type" in headers:
|
||||
m = email.message.Message()
|
||||
m["content-type"] = headers["Content-Type"]
|
||||
charset = m.get_param("charset")
|
||||
if charset:
|
||||
return str(charset)
|
||||
return None
|
||||
|
||||
|
||||
class CacheablePageContent:
    """Hashable wrapper around an IndexContent, keyed by its URL, so pages
    can serve as lru_cache keys."""

    def __init__(self, page: "IndexContent") -> None:
        # Only pages that opted into link-parse caching may be wrapped.
        assert page.cache_link_parsing
        self.page = page

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, type(self)):
            return False
        return self.page.url == other.page.url

    def __hash__(self) -> int:
        return hash(self.page.url)
class ParseLinks(Protocol):
    """Callable signature for functions that extract Links from a page."""

    def __call__(self, page: "IndexContent") -> Iterable[Link]: ...
def with_cached_index_content(fn: ParseLinks) -> ParseLinks:
    """
    Cache ``fn``'s parsed links per page (keyed by CacheablePageContent),
    except for pages whose ``cache_link_parsing`` flag is False.
    """

    @functools.lru_cache(maxsize=None)
    def cached(cacheable_page: CacheablePageContent) -> List[Link]:
        return list(fn(cacheable_page.page))

    @functools.wraps(fn)
    def wrapper(page: "IndexContent") -> List[Link]:
        if not page.cache_link_parsing:
            # Caching disabled for this page: parse fresh each time.
            return list(fn(page))
        return cached(CacheablePageContent(page))

    return wrapper
@with_cached_index_content
def parse_links(page: "IndexContent") -> Iterable[Link]:
    """
    Parse a Simple API's Index Content, and yield its anchor elements as Link objects.
    """

    content_type_l = page.content_type.lower()
    if content_type_l.startswith("application/vnd.pypi.simple.v1+json"):
        # PEP 691 JSON form: links live in the "files" array.
        data = json.loads(page.content)
        for file in data.get("files", []):
            link = Link.from_json(file, page.url)
            if link is not None:
                yield link
        return

    # HTML form: collect anchors, honoring an optional <base href>.
    parser = HTMLLinkParser(page.url)
    parser.feed(page.content.decode(page.encoding or "utf-8"))

    base_url = parser.base_url or page.url
    for anchor in parser.anchors:
        link = Link.from_element(anchor, page_url=page.url, base_url=base_url)
        if link is not None:
            yield link
@dataclass(frozen=True)
class IndexContent:
    """One fetched index response (page), along with its URL.

    :param content: the raw response body.
    :param content_type: the response's Content-Type header.
    :param encoding: the encoding to decode the given content.
    :param url: the URL from which the HTML was downloaded.
    :param cache_link_parsing: whether links parsed from this page's url
        should be cached. PyPI index urls should have this set to False,
        for example.
    """

    content: bytes
    content_type: str
    encoding: Optional[str]
    url: str
    cache_link_parsing: bool = True

    def __str__(self) -> str:
        return redact_auth_from_url(self.url)
class HTMLLinkParser(HTMLParser):
    """
    HTMLParser that records the first <base> href and the attributes of
    every anchor element encountered.
    """

    def __init__(self, url: str) -> None:
        super().__init__(convert_charrefs=True)
        self.url: str = url
        self.base_url: Optional[str] = None
        self.anchors: List[Dict[str, Optional[str]]] = []

    def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
        if tag == "a":
            self.anchors.append(dict(attrs))
        elif tag == "base" and self.base_url is None:
            # Only the first <base href> wins.
            href = self.get_href(attrs)
            if href is not None:
                self.base_url = href

    def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
        return next((value for name, value in attrs if name == "href"), None)
def _handle_get_simple_fail(
|
||||
link: Link,
|
||||
reason: Union[str, Exception],
|
||||
meth: Optional[Callable[..., None]] = None,
|
||||
) -> None:
|
||||
if meth is None:
|
||||
meth = logger.debug
|
||||
meth("Could not fetch URL %s: %s - skipping", link, reason)
|
||||
|
||||
|
||||
def _make_index_content(
    response: Response, cache_link_parsing: bool = True
) -> IndexContent:
    """Build an IndexContent from a completed HTTP response."""
    return IndexContent(
        response.content,
        response.headers["Content-Type"],
        encoding=_get_encoding_from_headers(response.headers),
        url=response.url,
        cache_link_parsing=cache_link_parsing,
    )
def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]:
    # Fetch and wrap a Simple API page for ``link``. Returns None when the
    # URL cannot be served as an index page (VCS URL, archive, or any of the
    # network failures handled below); every failure path logs its reason.
    url = link.url.split("#", 1)[0]

    # Check for VCS schemes that do not support lookup as web pages.
    vcs_scheme = _match_vcs_scheme(url)
    if vcs_scheme:
        logger.warning(
            "Cannot look at %s URL %s because it does not support lookup as web pages.",
            vcs_scheme,
            link,
        )
        return None

    # Tack index.html onto file:// URLs that point to directories
    scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
    if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
        # add trailing slash if not present so urljoin doesn't trim
        # final segment
        if not url.endswith("/"):
            url += "/"
        # TODO: In the future, it would be nice if pip supported PEP 691
        #       style responses in the file:// URLs, however there's no
        #       standard file extension for application/vnd.pypi.simple.v1+json
        #       so we'll need to come up with something on our own.
        url = urllib.parse.urljoin(url, "index.html")
        logger.debug(" file: URL is directory, getting %s", url)

    try:
        resp = _get_simple_response(url, session=session)
    except _NotHTTP:
        logger.warning(
            "Skipping page %s because it looks like an archive, and cannot "
            "be checked by a HTTP HEAD request.",
            link,
        )
    except _NotAPIContent as exc:
        logger.warning(
            "Skipping page %s because the %s request got Content-Type: %s. "
            "The only supported Content-Types are application/vnd.pypi.simple.v1+json, "
            "application/vnd.pypi.simple.v1+html, and text/html",
            link,
            exc.request_desc,
            exc.content_type,
        )
    except NetworkConnectionError as exc:
        _handle_get_simple_fail(link, exc)
    except RetryError as exc:
        _handle_get_simple_fail(link, exc)
    except SSLError as exc:
        # SSL problems are surfaced at info level rather than debug.
        reason = "There was a problem confirming the ssl certificate: "
        reason += str(exc)
        _handle_get_simple_fail(link, reason, meth=logger.info)
    except requests.ConnectionError as exc:
        _handle_get_simple_fail(link, f"connection error: {exc}")
    except requests.Timeout:
        _handle_get_simple_fail(link, "timed out")
    else:
        # Success: wrap the response, honoring the link's caching preference.
        return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing)
    return None
class CollectedSources(NamedTuple):
    """Link sources gathered for one project: the --find-links entries and
    the index URL entries, kept separate so callers can rank them."""

    find_links: Sequence[Optional[LinkSource]]
    index_urls: Sequence[Optional[LinkSource]]
class LinkCollector:
    """
    Responsible for collecting Link objects from all configured locations,
    making network requests as needed.

    The class's main method is its collect_sources() method.
    """

    def __init__(
        self,
        session: PipSession,
        search_scope: SearchScope,
    ) -> None:
        # search_scope holds the configured index URLs and --find-links;
        # session performs the actual (possibly authenticated) requests.
        self.search_scope = search_scope
        self.session = session

    @classmethod
    def create(
        cls,
        session: PipSession,
        options: Values,
        suppress_no_index: bool = False,
    ) -> "LinkCollector":
        """
        :param session: The Session to use to make requests.
        :param suppress_no_index: Whether to ignore the --no-index option
            when constructing the SearchScope object.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index and not suppress_no_index:
            # --no-index wins: drop all index URLs (but log what is ignored).
            logger.debug(
                "Ignoring indexes: %s",
                ",".join(redact_auth_from_url(url) for url in index_urls),
            )
            index_urls = []

        # Make sure find_links is a list before passing to create().
        find_links = options.find_links or []

        search_scope = SearchScope.create(
            find_links=find_links,
            index_urls=index_urls,
            no_index=options.no_index,
        )
        link_collector = LinkCollector(
            session=session,
            search_scope=search_scope,
        )
        return link_collector

    @property
    def find_links(self) -> List[str]:
        # Convenience passthrough to the search scope's --find-links list.
        return self.search_scope.find_links

    def fetch_response(self, location: Link) -> Optional[IndexContent]:
        """
        Fetch an HTML page containing package links.

        Returns None when the location cannot be fetched or is not an
        index page.
        """
        return _get_index_content(location, session=self.session)

    def collect_sources(
        self,
        project_name: str,
        candidates_from_page: CandidatesFromPage,
    ) -> CollectedSources:
        """Build the per-project LinkSource lists for index URLs and
        --find-links locations (deduplicated by URL)."""
        # The OrderedDict calls deduplicate sources by URL.
        # Index pages are never cached (cache_link_parsing=False) because
        # uploads should become visible immediately; flat --find-links
        # locations are stable enough to cache.
        index_url_sources = collections.OrderedDict(
            build_source(
                loc,
                candidates_from_page=candidates_from_page,
                page_validator=self.session.is_secure_origin,
                expand_dir=False,
                cache_link_parsing=False,
                project_name=project_name,
            )
            for loc in self.search_scope.get_index_urls_locations(project_name)
        ).values()
        find_links_sources = collections.OrderedDict(
            build_source(
                loc,
                candidates_from_page=candidates_from_page,
                page_validator=self.session.is_secure_origin,
                expand_dir=True,
                cache_link_parsing=True,
                project_name=project_name,
            )
            for loc in self.find_links
        ).values()

        if logger.isEnabledFor(logging.DEBUG):
            lines = [
                f"* {s.link}"
                for s in itertools.chain(find_links_sources, index_url_sources)
                if s is not None and s.link is not None
            ]
            lines = [
                f"{len(lines)} location(s) to search "
                f"for versions of {project_name}:"
            ] + lines
            logger.debug("\n".join(lines))

        return CollectedSources(
            find_links=list(find_links_sources),
            index_urls=list(index_url_sources),
        )
1029
Dependencies/Python/Lib/site-packages/pip/_internal/index/package_finder.py
vendored
Normal file
1029
Dependencies/Python/Lib/site-packages/pip/_internal/index/package_finder.py
vendored
Normal file
File diff suppressed because it is too large
Load Diff
284
Dependencies/Python/Lib/site-packages/pip/_internal/index/sources.py
vendored
Normal file
284
Dependencies/Python/Lib/site-packages/pip/_internal/index/sources.py
vendored
Normal file
@ -0,0 +1,284 @@
|
||||
import logging
|
||||
import mimetypes
|
||||
import os
|
||||
from collections import defaultdict
|
||||
from typing import Callable, Dict, Iterable, List, Optional, Tuple
|
||||
|
||||
from pip._vendor.packaging.utils import (
|
||||
InvalidSdistFilename,
|
||||
InvalidWheelFilename,
|
||||
canonicalize_name,
|
||||
parse_sdist_filename,
|
||||
parse_wheel_filename,
|
||||
)
|
||||
|
||||
from pip._internal.models.candidate import InstallationCandidate
|
||||
from pip._internal.models.link import Link
|
||||
from pip._internal.utils.urls import path_to_url, url_to_path
|
||||
from pip._internal.vcs import is_url
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
FoundCandidates = Iterable[InstallationCandidate]
|
||||
FoundLinks = Iterable[Link]
|
||||
CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
|
||||
PageValidator = Callable[[Link], bool]
|
||||
|
||||
|
||||
class LinkSource:
    """Abstract interface for one place installation links can come from.

    Concrete subclasses cover remote pages, local files, and directories.
    """

    @property
    def link(self) -> Optional[Link]:
        """The underlying link, or ``None`` when there is no single link."""
        raise NotImplementedError

    def page_candidates(self) -> FoundCandidates:
        """Yield candidates parsed from an archive-listing HTML file."""
        raise NotImplementedError

    def file_links(self) -> FoundLinks:
        """Yield links that specify archives directly."""
        raise NotImplementedError
|
||||
|
||||
|
||||
def _is_html_file(file_url: str) -> bool:
|
||||
return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"
|
||||
|
||||
|
||||
class _FlatDirectoryToUrls:
    """Lazily scans a directory once and caches the resulting URLs."""

    def __init__(self, path: str) -> None:
        self._dir_path = path
        # HTML files found in the directory, as file: URLs.
        self._html_page_urls: List[str] = []
        # Canonical project name -> file: URLs of matching wheel/sdist files.
        # defaultdict so lookups of unknown projects yield an empty list.
        self._urls_by_project: Dict[str, List[str]] = defaultdict(list)
        self._scan_done = False

    @staticmethod
    def _project_from_filename(filename):
        """Return the project name from a wheel/sdist filename, else None."""
        try:
            return parse_wheel_filename(filename)[0]
        except InvalidWheelFilename:
            pass
        try:
            return parse_sdist_filename(filename)[0]
        except InvalidSdistFilename:
            return None

    def _scan_once(self) -> None:
        """Walk the directory a single time, filling both caches together."""
        for entry in os.scandir(self._dir_path):
            entry_url = path_to_url(entry.path)
            if _is_html_file(entry_url):
                self._html_page_urls.append(entry_url)
                continue
            # Entries that are neither valid wheels nor valid sdists are
            # not worth considering as packages.
            project = self._project_from_filename(entry.name)
            if project is not None:
                self._urls_by_project[project].append(entry_url)
        self._scan_done = True

    @property
    def page_candidates(self) -> List[str]:
        if not self._scan_done:
            self._scan_once()
        return self._html_page_urls

    @property
    def project_name_to_urls(self) -> Dict[str, List[str]]:
        if not self._scan_done:
            self._scan_once()
        return self._urls_by_project
|
||||
|
||||
|
||||
class _FlatDirectorySource(LinkSource):
    """Link source specified by ``--find-links=<path-to-dir>``.

    Looks at the content of the directory, and returns:

    * ``page_candidates``: Links listed on each HTML file in the directory.
    * ``file_candidates``: Archives in the directory.
    """

    # Shared across instances so each directory is scanned at most once per
    # process, regardless of how many projects are resolved against it.
    _paths_to_urls: Dict[str, _FlatDirectoryToUrls] = {}

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        path: str,
        project_name: str,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._project_name = canonicalize_name(project_name)

        # Reuse the cached scanner for this directory when one exists.
        cached = self._paths_to_urls.get(path)
        if cached is None:
            cached = _FlatDirectoryToUrls(path=path)
            self._paths_to_urls[path] = cached
        self._path_to_urls = cached

    @property
    def link(self) -> Optional[Link]:
        return None

    def page_candidates(self) -> FoundCandidates:
        for page_url in self._path_to_urls.page_candidates:
            yield from self._candidates_from_page(Link(page_url))

    def file_links(self) -> FoundLinks:
        archive_urls = self._path_to_urls.project_name_to_urls[self._project_name]
        for archive_url in archive_urls:
            yield Link(archive_url)
|
||||
|
||||
|
||||
class _LocalFileSource(LinkSource):
    """``--find-links=<path-or-url>`` or ``--[extra-]index-url=<path-or-url>``.

    If a URL is supplied, it must be a ``file:`` URL. A plain path is
    converted to a URL first. This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # Only an HTML file can be parsed as a listing page.
        if _is_html_file(self._link.url):
            yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # A non-HTML file is offered as the archive itself.
        if not _is_html_file(self._link.url):
            yield self._link
|
||||
|
||||
|
||||
class _RemoteFileSource(LinkSource):
    """``--find-links=<url>`` or ``--[extra-]index-url=<url>``.

    This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        page_validator: PageValidator,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._page_validator = page_validator
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # Pages that fail validation (e.g. insecure origin) yield nothing.
        if self._page_validator(self._link):
            yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        yield self._link
|
||||
|
||||
|
||||
class _IndexDirectorySource(LinkSource):
    """``--[extra-]index-url=<path-to-directory>``.

    This is treated like a remote URL; ``candidates_from_page`` contains
    logic for this by appending ``index.html`` to the link.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # An index-style directory never contributes direct archive links.
        return ()
|
||||
|
||||
|
||||
def build_source(
    location: str,
    *,
    candidates_from_page: CandidatesFromPage,
    page_validator: PageValidator,
    expand_dir: bool,
    cache_link_parsing: bool,
    project_name: str,
) -> Tuple[Optional[str], Optional[LinkSource]]:
    """Create a LinkSource for one ``--find-links`` / ``--index-url`` location.

    :param location: The raw location string: a local path, a ``file:`` URL,
        or a remote URL.
    :param candidates_from_page: Callback turning a fetched page into
        installation candidates.
    :param page_validator: Predicate deciding whether a remote page may be
        used (e.g. the session's secure-origin check).
    :param expand_dir: When the location is a directory, treat it as a flat
        directory of archives (True) rather than an index-style directory
        (False).
    :param cache_link_parsing: Passed through to the created ``Link``s.
    :param project_name: The project being searched for; used by the flat
        directory source to filter archives.
    :return: ``(url, source)``; either element may be ``None`` when the
        location is unusable, in which case a warning is logged.
    """
    # Normalize the location into a URL and, when local, a filesystem path.
    # Order matters: an existing local path wins over URL interpretation.
    path: Optional[str] = None
    url: Optional[str] = None
    if os.path.exists(location):  # Is a local path.
        url = path_to_url(location)
        path = location
    elif location.startswith("file:"):  # A file: URL.
        url = location
        path = url_to_path(location)
    elif is_url(location):
        url = location

    if url is None:
        msg = (
            "Location '%s' is ignored: "
            "it is either a non-existing path or lacks a specific scheme."
        )
        logger.warning(msg, location)
        return (None, None)

    # No local path means the location is a remote URL.
    if path is None:
        source: LinkSource = _RemoteFileSource(
            candidates_from_page=candidates_from_page,
            page_validator=page_validator,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, source)

    if os.path.isdir(path):
        if expand_dir:
            source = _FlatDirectorySource(
                candidates_from_page=candidates_from_page,
                path=path,
                project_name=project_name,
            )
        else:
            source = _IndexDirectorySource(
                candidates_from_page=candidates_from_page,
                link=Link(url, cache_link_parsing=cache_link_parsing),
            )
        return (url, source)
    elif os.path.isfile(path):
        source = _LocalFileSource(
            candidates_from_page=candidates_from_page,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, source)
    # The path exists but is neither a regular file nor a directory (e.g. a
    # socket), or it vanished after the exists() check above.
    logger.warning(
        "Location '%s' is ignored: it is neither a file nor a directory.",
        location,
    )
    return (url, None)
|
456
Dependencies/Python/Lib/site-packages/pip/_internal/locations/__init__.py
vendored
Normal file
456
Dependencies/Python/Lib/site-packages/pip/_internal/locations/__init__.py
vendored
Normal file
@ -0,0 +1,456 @@
|
||||
import functools
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
import sysconfig
|
||||
from typing import Any, Dict, Generator, Optional, Tuple
|
||||
|
||||
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
|
||||
from pip._internal.utils.compat import WINDOWS
|
||||
from pip._internal.utils.deprecation import deprecated
|
||||
from pip._internal.utils.virtualenv import running_under_virtualenv
|
||||
|
||||
from . import _sysconfig
|
||||
from .base import (
|
||||
USER_CACHE_DIR,
|
||||
get_major_minor_version,
|
||||
get_src_prefix,
|
||||
is_osx_framework,
|
||||
site_packages,
|
||||
user_site,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"USER_CACHE_DIR",
|
||||
"get_bin_prefix",
|
||||
"get_bin_user",
|
||||
"get_major_minor_version",
|
||||
"get_platlib",
|
||||
"get_purelib",
|
||||
"get_scheme",
|
||||
"get_src_prefix",
|
||||
"site_packages",
|
||||
"user_site",
|
||||
]
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")
|
||||
|
||||
_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)
|
||||
|
||||
|
||||
def _should_use_sysconfig() -> bool:
    """This function determines the value of _USE_SYSCONFIG.

    By default, pip uses sysconfig on Python 3.10+.
    But Python distributors can override this decision by setting:
        sysconfig._PIP_USE_SYSCONFIG = True / False
    Rationale in https://github.com/pypa/pip/issues/10647

    This is a function for testability, but should be constant during any one
    run.
    """
    if hasattr(sysconfig, "_PIP_USE_SYSCONFIG"):
        # A distributor's explicit override takes precedence.
        return bool(sysconfig._PIP_USE_SYSCONFIG)
    return bool(_USE_SYSCONFIG_DEFAULT)
|
||||
|
||||
|
||||
# Decided once at import time; get_scheme() and friends branch on this value.
_USE_SYSCONFIG = _should_use_sysconfig()

if not _USE_SYSCONFIG:
    # Import distutils lazily to avoid deprecation warnings,
    # but import it soon enough that it is in memory and available during
    # a pip reinstall.
    from . import _distutils

# Be noisy about incompatibilities if this platform "should" be using
# sysconfig, but is explicitly opting out and using distutils instead.
if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
    _MISMATCH_LEVEL = logging.WARNING
else:
    _MISMATCH_LEVEL = logging.DEBUG
|
||||
|
||||
|
||||
def _looks_like_bpo_44860() -> bool:
    """The resolution to bpo-44860 will change this incorrect platlib.

    See <https://bugs.python.org/issue44860>.
    """
    from distutils.command.install import INSTALL_SCHEMES

    # A missing scheme or key simply means the bug is not present.
    unix_user_platlib = INSTALL_SCHEMES.get("unix_user", {}).get("platlib")
    return unix_user_platlib == "$usersite"
|
||||
|
||||
|
||||
def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
|
||||
platlib = scheme["platlib"]
|
||||
if "/$platlibdir/" in platlib:
|
||||
platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
|
||||
if "/lib64/" not in platlib:
|
||||
return False
|
||||
unpatched = platlib.replace("/lib64/", "/lib/")
|
||||
return unpatched.replace("$platbase/", "$base/") == scheme["purelib"]
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=None)
def _looks_like_red_hat_lib() -> bool:
    """Red Hat patches platlib in unix_prefix and unix_home, but not purelib.

    This is the only way I can see to tell a Red Hat-patched Python.
    """
    from distutils.command.install import INSTALL_SCHEMES

    for scheme_key in ("unix_prefix", "unix_home"):
        scheme = INSTALL_SCHEMES.get(scheme_key)
        if scheme is None:
            return False
        if not _looks_like_red_hat_patched_platlib_purelib(scheme):
            return False
    return True
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=None)
def _looks_like_debian_scheme() -> bool:
    """Debian adds two additional schemes."""
    from distutils.command.install import INSTALL_SCHEMES

    required_schemes = ("deb_system", "unix_local")
    return all(name in INSTALL_SCHEMES for name in required_schemes)
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=None)
def _looks_like_red_hat_scheme() -> bool:
    """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.

    Red Hat's ``00251-change-user-install-location.patch`` changes the install
    command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is
    (fortunately?) done quite unconditionally, so we create a default command
    object without any configuration to detect this.
    """
    from distutils.command.install import install
    from distutils.dist import Distribution

    cmd: Any = install(Distribution())
    cmd.finalize_options()
    expected_prefix = f"{os.path.normpath(sys.prefix)}/local"
    expected_exec_prefix = f"{os.path.normpath(sys.exec_prefix)}/local"
    return cmd.prefix == expected_prefix and cmd.exec_prefix == expected_exec_prefix
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=None)
def _looks_like_slackware_scheme() -> bool:
    """Slackware patches sysconfig but fails to patch distutils and site.

    Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
    path, but does not do the same to the site module.
    """
    if user_site is None:  # User-site not available.
        return False
    try:
        user_paths = sysconfig.get_paths(scheme="posix_user", expand=False)
    except KeyError:  # User-site not available.
        return False
    purelib_uses_lib64 = "/lib64/" in user_paths["purelib"]
    return purelib_uses_lib64 and "/lib64/" not in user_site
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def _looks_like_msys2_mingw_scheme() -> bool:
|
||||
"""MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.
|
||||
|
||||
However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is
|
||||
likely going to be included in their 3.10 release, so we ignore the warning.
|
||||
See msys2/MINGW-packages#9319.
|
||||
|
||||
MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase,
|
||||
and is missing the final ``"site-packages"``.
|
||||
"""
|
||||
paths = sysconfig.get_paths("nt", expand=False)
|
||||
return all(
|
||||
"Lib" not in p and "lib" in p and not p.endswith("site-packages")
|
||||
for p in (paths[key] for key in ("platlib", "purelib"))
|
||||
)
|
||||
|
||||
|
||||
def _fix_abiflags(parts: Tuple[str]) -> Generator[str, None, None]:
|
||||
ldversion = sysconfig.get_config_var("LDVERSION")
|
||||
abiflags = getattr(sys, "abiflags", None)
|
||||
|
||||
# LDVERSION does not end with sys.abiflags. Just return the path unchanged.
|
||||
if not ldversion or not abiflags or not ldversion.endswith(abiflags):
|
||||
yield from parts
|
||||
return
|
||||
|
||||
# Strip sys.abiflags from LDVERSION-based path components.
|
||||
for part in parts:
|
||||
if part.endswith(ldversion):
|
||||
part = part[: (0 - len(abiflags))]
|
||||
yield part
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=None)
def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:
    """Log one distutils/sysconfig path mismatch.

    The lru_cache decorator deduplicates repeated reports of the same
    (old, new, key) combination within a run.
    """
    report_url = "https://github.com/pypa/pip/issues/10151"
    template = (
        "Value for %s does not match. Please report this to <%s>"
        "\ndistutils: %s"
        "\nsysconfig: %s"
    )
    logger.log(_MISMATCH_LEVEL, template, key, report_url, old, new)
|
||||
|
||||
|
||||
def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
|
||||
if old == new:
|
||||
return False
|
||||
_warn_mismatched(old, new, key=key)
|
||||
return True
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=None)
def _log_context(
    *,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    prefix: Optional[str] = None,
) -> None:
    """Log the scheme inputs that produced a mismatch, once per combination."""
    message = "\n".join(
        [
            "Additional context:",
            "user = %r",
            "home = %r",
            "root = %r",
            "prefix = %r",
        ]
    )
    logger.log(_MISMATCH_LEVEL, message, user, home, root, prefix)
|
||||
|
||||
|
||||
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> Scheme:
    """Compute the installation scheme for *dist_name*.

    Runs both the sysconfig- and the distutils-based implementation. When
    ``_USE_SYSCONFIG`` is set the sysconfig result is returned directly;
    otherwise the distutils result is returned after comparing the two and
    warning about any differences not explained by a known distro-specific
    patch (each skip below covers one such patch).
    """
    new = _sysconfig.get_scheme(
        dist_name,
        user=user,
        home=home,
        root=root,
        isolated=isolated,
        prefix=prefix,
    )
    if _USE_SYSCONFIG:
        return new

    old = _distutils.get_scheme(
        dist_name,
        user=user,
        home=home,
        root=root,
        isolated=isolated,
        prefix=prefix,
    )

    # Collect (old, new, key) triples for every unexplained difference.
    warning_contexts = []
    for k in SCHEME_KEYS:
        old_v = pathlib.Path(getattr(old, k))
        new_v = pathlib.Path(getattr(new, k))

        if old_v == new_v:
            continue

        # distutils incorrectly put PyPy packages under ``site-packages/python``
        # in the ``posix_home`` scheme, but PyPy devs said they expect the
        # directory name to be ``pypy`` instead. So we treat this as a bug fix
        # and not warn about it. See bpo-43307 and python/cpython#24628.
        skip_pypy_special_case = (
            sys.implementation.name == "pypy"
            and home is not None
            and k in ("platlib", "purelib")
            and old_v.parent == new_v.parent
            and old_v.name.startswith("python")
            and new_v.name.startswith("pypy")
        )
        if skip_pypy_special_case:
            continue

        # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in
        # the ``include`` value, but distutils's ``headers`` does. We'll let
        # CPython decide whether this is a bug or feature. See bpo-43948.
        skip_osx_framework_user_special_case = (
            user
            and is_osx_framework()
            and k == "headers"
            and old_v.parent.parent == new_v.parent
            and old_v.parent.name.startswith("python")
        )
        if skip_osx_framework_user_special_case:
            continue

        # On Red Hat and derived Linux distributions, distutils is patched to
        # use "lib64" instead of "lib" for platlib.
        if k == "platlib" and _looks_like_red_hat_lib():
            continue

        # On Python 3.9+, sysconfig's posix_user scheme sets platlib against
        # sys.platlibdir, but distutils's unix_user incorrectly continues
        # using the same $usersite for both platlib and purelib. This creates a
        # mismatch when sys.platlibdir is not "lib".
        skip_bpo_44860 = (
            user
            and k == "platlib"
            and not WINDOWS
            and sys.version_info >= (3, 9)
            and _PLATLIBDIR != "lib"
            and _looks_like_bpo_44860()
        )
        if skip_bpo_44860:
            continue

        # Slackware incorrectly patches posix_user to use lib64 instead of lib,
        # but not usersite to match the location.
        skip_slackware_user_scheme = (
            user
            and k in ("platlib", "purelib")
            and not WINDOWS
            and _looks_like_slackware_scheme()
        )
        if skip_slackware_user_scheme:
            continue

        # Both Debian and Red Hat patch Python to place the system site under
        # /usr/local instead of /usr. Debian also places lib in dist-packages
        # instead of site-packages, but the /usr/local check should cover it.
        skip_linux_system_special_case = (
            not (user or home or prefix or running_under_virtualenv())
            and old_v.parts[1:3] == ("usr", "local")
            and len(new_v.parts) > 1
            and new_v.parts[1] == "usr"
            and (len(new_v.parts) < 3 or new_v.parts[2] != "local")
            and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme())
        )
        if skip_linux_system_special_case:
            continue

        # MSYS2 MINGW's sysconfig patch does not include the "site-packages"
        # part of the path. This is incorrect and will be fixed in MSYS.
        skip_msys2_mingw_bug = (
            WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme()
        )
        if skip_msys2_mingw_bug:
            continue

        # CPython's POSIX install script invokes pip (via ensurepip) against the
        # interpreter located in the source tree, not the install site. This
        # triggers special logic in sysconfig that's not present in distutils.
        # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
        skip_cpython_build = (
            sysconfig.is_python_build(check_home=True)
            and not WINDOWS
            and k in ("headers", "include", "platinclude")
        )
        if skip_cpython_build:
            continue

        warning_contexts.append((old_v, new_v, f"scheme.{k}"))

    if not warning_contexts:
        return old

    # Check if this path mismatch is caused by distutils config files. Those
    # files will no longer work once we switch to sysconfig, so this raises a
    # deprecation message for them.
    default_old = _distutils.distutils_scheme(
        dist_name,
        user,
        home,
        root,
        isolated,
        prefix,
        ignore_config_files=True,
    )
    if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
        deprecated(
            reason=(
                "Configuring installation scheme with distutils config files "
                "is deprecated and will no longer work in the near future. If you "
                "are using a Homebrew or Linuxbrew Python, please see discussion "
                "at https://github.com/Homebrew/homebrew-core/issues/76621"
            ),
            replacement=None,
            gone_in=None,
        )
        return old

    # Post warnings about this mismatch so user can report them back.
    for old_v, new_v, key in warning_contexts:
        _warn_mismatched(old_v, new_v, key=key)
    _log_context(user=user, home=home, root=root, prefix=prefix)

    return old
|
||||
|
||||
|
||||
def get_bin_prefix() -> str:
    """Return the script installation directory, warning on mismatch."""
    new = _sysconfig.get_bin_prefix()
    if _USE_SYSCONFIG:
        return new

    old = _distutils.get_bin_prefix()
    mismatched = _warn_if_mismatch(
        pathlib.Path(old), pathlib.Path(new), key="bin_prefix"
    )
    if mismatched:
        _log_context()
    return old
|
||||
|
||||
|
||||
def get_bin_user() -> str:
    """Return the user-scheme scripts directory (always via sysconfig)."""
    user_scheme = _sysconfig.get_scheme("", user=True)
    return user_scheme.scripts
|
||||
|
||||
|
||||
def _looks_like_deb_system_dist_packages(value: str) -> bool:
    """Check if the value is Debian's APT-controlled dist-packages.

    Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns
    the default package path controlled by APT, but does not patch
    ``sysconfig`` to do the same. This is similar to the bug worked around in
    ``get_scheme()``, but here the default is ``deb_system`` instead of
    ``unix_local``. Ultimately we can't do anything about this Debian bug, and
    this detection allows us to skip the warning when needed.

    Fix: collapsed the redundant ``if value == ...: return True / return
    False`` tail into a single boolean return.
    """
    if not _looks_like_debian_scheme():
        return False
    # Only the exact APT-managed path qualifies.
    return value == "/usr/lib/python3/dist-packages"
|
||||
|
||||
|
||||
def get_purelib() -> str:
    """Return the default pure-Python lib location."""
    new = _sysconfig.get_purelib()
    if _USE_SYSCONFIG:
        return new

    old = _distutils.get_purelib()
    if _looks_like_deb_system_dist_packages(old):
        # Known Debian quirk; never matches sysconfig, so skip the warning.
        return old
    mismatched = _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib")
    if mismatched:
        _log_context()
    return old
|
||||
|
||||
|
||||
def get_platlib() -> str:
    """Return the default platform-shared lib location."""
    new = _sysconfig.get_platlib()
    if _USE_SYSCONFIG:
        return new

    # NOTE: whenever _USE_SYSCONFIG is false, the module-level conditional
    # ``from . import _distutils`` has already bound the name, so the local
    # re-import this function used to do was redundant; removing it makes
    # this consistent with get_purelib() and get_bin_prefix().
    old = _distutils.get_platlib()
    if _looks_like_deb_system_dist_packages(old):
        return old
    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"):
        _log_context()
    return old
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-313.pyc
vendored
Normal file
Binary file not shown.
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/locations/__pycache__/base.cpython-313.pyc
vendored
Normal file
BIN
Dependencies/Python/Lib/site-packages/pip/_internal/locations/__pycache__/base.cpython-313.pyc
vendored
Normal file
Binary file not shown.
172
Dependencies/Python/Lib/site-packages/pip/_internal/locations/_distutils.py
vendored
Normal file
172
Dependencies/Python/Lib/site-packages/pip/_internal/locations/_distutils.py
vendored
Normal file
@ -0,0 +1,172 @@
|
||||
"""Locations where we look for configs, install stuff, etc"""
|
||||
|
||||
# The following comment should be removed at some point in the future.
|
||||
# mypy: strict-optional=False
|
||||
|
||||
# If pip's going to use distutils, it should not be using the copy that setuptools
|
||||
# might have injected into the environment. This is done by removing the injected
|
||||
# shim, if it's injected.
|
||||
#
|
||||
# See https://github.com/pypa/pip/issues/8761 for the original discussion and
|
||||
# rationale for why this is done within pip.
|
||||
try:
    # Ask the setuptools-injected shim (if any) to remove itself so the
    # stdlib distutils is used instead.
    __import__("_distutils_hack").remove_shim()
except (ImportError, AttributeError):
    # No shim injected (ImportError), or an old shim without remove_shim()
    # (AttributeError) -- either way there is nothing to undo.
    pass
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from distutils.cmd import Command as DistutilsCommand
|
||||
from distutils.command.install import SCHEME_KEYS
|
||||
from distutils.command.install import install as distutils_install_command
|
||||
from distutils.sysconfig import get_python_lib
|
||||
from typing import Dict, List, Optional, Union
|
||||
|
||||
from pip._internal.models.scheme import Scheme
|
||||
from pip._internal.utils.compat import WINDOWS
|
||||
from pip._internal.utils.virtualenv import running_under_virtualenv
|
||||
|
||||
from .base import get_major_minor_version
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def distutils_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
    *,
    ignore_config_files: bool = False,
) -> Dict[str, str]:
    """
    Return a distutils install scheme

    :param dist_name: project name; used only to build the ``headers`` path.
    :param user: use the "user" scheme.
    :param home: base directory for the "home" scheme.
    :param root: directory other paths are re-based under.
    :param isolated: pass ``--no-user-cfg`` so per-user distutils config is
        ignored.
    :param prefix: base directory for the "prefix" scheme.
    :param ignore_config_files: skip parsing distutils config files entirely.
    :return: Mapping of each SCHEME_KEYS entry to its install path.
    """
    from distutils.dist import Distribution

    dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name}
    if isolated:
        dist_args["script_args"] = ["--no-user-cfg"]

    d = Distribution(dist_args)
    if not ignore_config_files:
        try:
            d.parse_config_files()
        except UnicodeDecodeError:
            # A config file with a bad encoding is skipped with a warning
            # rather than aborting the whole scheme computation.
            paths = d.find_config_files()
            logger.warning(
                "Ignore distutils configs in %s due to encoding errors.",
                ", ".join(os.path.basename(p) for p in paths),
            )
    obj: Optional[DistutilsCommand] = None
    obj = d.get_command_obj("install", create=True)
    assert obj is not None
    i: distutils_install_command = obj
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), f"user={user} prefix={prefix}"
    assert not (home and prefix), f"home={home} prefix={prefix}"
    i.user = user or i.user
    if user or home:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()

    scheme: Dict[str, str] = {}
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, "install_" + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib). Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if "install_lib" in d.get_option_dict("install"):
        scheme.update({"purelib": i.install_lib, "platlib": i.install_lib})

    if running_under_virtualenv():
        # Rebuild the headers path relative to the virtualenv's base.
        if home:
            prefix = home
        elif user:
            prefix = i.install_userbase
        else:
            prefix = i.prefix
        scheme["headers"] = os.path.join(
            prefix,
            "include",
            "site",
            f"python{get_major_minor_version()}",
            dist_name,
        )

    if root is not None:
        # Re-base headers under root, dropping any drive letter (Windows).
        path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
        scheme["headers"] = os.path.join(root, path_no_drive[1:])

    return scheme
|
||||
|
||||
|
||||
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> Scheme:
    """
    Get the "scheme" corresponding to the input parameters. The distutils
    documentation provides the context for the available schemes:
    https://docs.python.org/3/install/index.html#alternate-installation

    :param dist_name: the name of the package to retrieve the scheme for, used
        in the headers scheme path
    :param user: indicates to use the "user" scheme
    :param home: indicates to use the "home" scheme and provides the base
        directory for the same
    :param root: root under which other directories are re-based
    :param isolated: equivalent to --no-user-cfg, i.e. do not consider
        ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
        scheme paths
    :param prefix: indicates to use the "prefix" scheme and provides the
        base directory for the same
    """
    paths = distutils_scheme(dist_name, user, home, root, isolated, prefix)
    scheme_keys = ("platlib", "purelib", "headers", "scripts", "data")
    return Scheme(**{key: paths[key] for key in scheme_keys})
|
||||
|
||||
|
||||
def get_bin_prefix() -> str:
    """Return the directory where console scripts are installed."""
    # XXX: In old virtualenv versions, sys.prefix can contain '..' components,
    # so normpath is needed to eliminate them.
    base = os.path.normpath(sys.prefix)

    if WINDOWS:
        scripts_dir = os.path.join(base, "Scripts")
        # buildout uses 'bin' on Windows too?
        if os.path.exists(scripts_dir):
            return scripts_dir
        return os.path.join(base, "bin")

    # Forcing to use /usr/local/bin for standard macOS framework installs.
    if sys.platform.startswith("darwin") and base.startswith("/System/Library/"):
        return "/usr/local/bin"
    return os.path.join(base, "bin")
|
||||
|
||||
|
||||
def get_purelib() -> str:
    # Pure-Python (platform-independent) library directory, per distutils.
    return get_python_lib(plat_specific=False)
|
||||
|
||||
|
||||
def get_platlib() -> str:
    # Platform-specific (compiled extension) library directory, per distutils.
    return get_python_lib(plat_specific=True)
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user