Removed venv and www from the repository; also added a requirements file
Binary files not shown.
@@ -1,15 +1,17 @@
|
||||
"""
|
||||
This code was taken from https://github.com/ActiveState/appdirs and modified
|
||||
to suit our purposes.
|
||||
This code wraps the vendored appdirs module so the return values are
|
||||
compatible for the current pip code base.
|
||||
|
||||
The intention is to rewrite current usages gradually, keeping the tests passing,
|
||||
and eventually drop this after all usages are changed.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from pip._vendor.six import PY2, text_type
|
||||
from pip._vendor import appdirs as _appdirs
|
||||
|
||||
from pip._internal.utils.compat import WINDOWS, expanduser
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
@@ -18,251 +20,25 @@ if MYPY_CHECK_RUNNING:
|
||||
|
||||
def user_cache_dir(appname):
|
||||
# type: (str) -> str
|
||||
r"""
|
||||
Return full path to the user-specific cache dir for this application.
|
||||
|
||||
"appname" is the name of application.
|
||||
|
||||
Typical user cache directories are:
|
||||
macOS: ~/Library/Caches/<AppName>
|
||||
Unix: ~/.cache/<AppName> (XDG default)
|
||||
Windows: C:\Users\<username>\AppData\Local\<AppName>\Cache
|
||||
|
||||
On Windows the only suggestion in the MSDN docs is that local settings go
|
||||
in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
|
||||
non-roaming app data dir (the default returned by `user_data_dir`). Apps
|
||||
typically put cache data somewhere *under* the given dir here. Some
|
||||
examples:
|
||||
...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
|
||||
...\Acme\SuperApp\Cache\1.0
|
||||
|
||||
OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
|
||||
"""
|
||||
if WINDOWS:
|
||||
# Get the base path
|
||||
path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
|
||||
|
||||
# When using Python 2, return paths as bytes on Windows like we do on
|
||||
# other operating systems. See helper function docs for more details.
|
||||
if PY2 and isinstance(path, text_type):
|
||||
path = _win_path_to_bytes(path)
|
||||
|
||||
# Add our app name and Cache directory to it
|
||||
path = os.path.join(path, appname, "Cache")
|
||||
elif sys.platform == "darwin":
|
||||
# Get the base path
|
||||
path = expanduser("~/Library/Caches")
|
||||
|
||||
# Add our app name to it
|
||||
path = os.path.join(path, appname)
|
||||
else:
|
||||
# Get the base path
|
||||
path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))
|
||||
|
||||
# Add our app name to it
|
||||
path = os.path.join(path, appname)
|
||||
|
||||
return path
|
||||
|
||||
|
||||
def user_data_dir(appname, roaming=False):
|
||||
# type: (str, bool) -> str
|
||||
r"""
|
||||
Return full path to the user-specific data dir for this application.
|
||||
|
||||
"appname" is the name of application.
|
||||
If None, just the system directory is returned.
|
||||
"roaming" (boolean, default False) can be set True to use the Windows
|
||||
roaming appdata directory. That means that for users on a Windows
|
||||
network setup for roaming profiles, this user data will be
|
||||
sync'd on login. See
|
||||
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
|
||||
for a discussion of issues.
|
||||
|
||||
Typical user data directories are:
|
||||
macOS: ~/Library/Application Support/<AppName>
|
||||
if it exists, else ~/.config/<AppName>
|
||||
Unix: ~/.local/share/<AppName> # or in
|
||||
$XDG_DATA_HOME, if defined
|
||||
Win XP (not roaming): C:\Documents and Settings\<username>\ ...
|
||||
...Application Data\<AppName>
|
||||
Win XP (roaming): C:\Documents and Settings\<username>\Local ...
|
||||
...Settings\Application Data\<AppName>
|
||||
Win 7 (not roaming): C:\\Users\<username>\AppData\Local\<AppName>
|
||||
Win 7 (roaming): C:\\Users\<username>\AppData\Roaming\<AppName>
|
||||
|
||||
For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
|
||||
That means, by default "~/.local/share/<AppName>".
|
||||
"""
|
||||
if WINDOWS:
|
||||
const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
|
||||
path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
|
||||
elif sys.platform == "darwin":
|
||||
path = os.path.join(
|
||||
expanduser('~/Library/Application Support/'),
|
||||
appname,
|
||||
) if os.path.isdir(os.path.join(
|
||||
expanduser('~/Library/Application Support/'),
|
||||
appname,
|
||||
)
|
||||
) else os.path.join(
|
||||
expanduser('~/.config/'),
|
||||
appname,
|
||||
)
|
||||
else:
|
||||
path = os.path.join(
|
||||
os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),
|
||||
appname,
|
||||
)
|
||||
|
||||
return path
|
||||
return _appdirs.user_cache_dir(appname, appauthor=False)
|
||||
|
||||
|
||||
def user_config_dir(appname, roaming=True):
|
||||
# type: (str, bool) -> str
|
||||
"""Return full path to the user-specific config dir for this application.
|
||||
|
||||
"appname" is the name of application.
|
||||
If None, just the system directory is returned.
|
||||
"roaming" (boolean, default True) can be set False to not use the
|
||||
Windows roaming appdata directory. That means that for users on a
|
||||
Windows network setup for roaming profiles, this user data will be
|
||||
sync'd on login. See
|
||||
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
|
||||
for a discussion of issues.
|
||||
|
||||
Typical user data directories are:
|
||||
macOS: same as user_data_dir
|
||||
Unix: ~/.config/<AppName>
|
||||
Win *: same as user_data_dir
|
||||
|
||||
For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
|
||||
That means, by default "~/.config/<AppName>".
|
||||
"""
|
||||
if WINDOWS:
|
||||
path = user_data_dir(appname, roaming=roaming)
|
||||
elif sys.platform == "darwin":
|
||||
path = user_data_dir(appname)
|
||||
else:
|
||||
path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
|
||||
path = os.path.join(path, appname)
|
||||
|
||||
path = _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)
|
||||
if _appdirs.system == "darwin" and not os.path.isdir(path):
|
||||
path = os.path.expanduser('~/.config/')
|
||||
if appname:
|
||||
path = os.path.join(path, appname)
|
||||
return path
|
||||
|
||||
|
||||
# for the discussion regarding site_config_dirs locations
|
||||
# for the discussion regarding site_config_dir locations
|
||||
# see <https://github.com/pypa/pip/issues/1733>
|
||||
def site_config_dirs(appname):
|
||||
# type: (str) -> List[str]
|
||||
r"""Return a list of potential user-shared config dirs for this application.
|
||||
|
||||
"appname" is the name of application.
|
||||
|
||||
Typical user config directories are:
|
||||
macOS: /Library/Application Support/<AppName>/
|
||||
Unix: /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
|
||||
$XDG_CONFIG_DIRS
|
||||
Win XP: C:\Documents and Settings\All Users\Application ...
|
||||
...Data\<AppName>\
|
||||
Vista: (Fail! "C:\ProgramData" is a hidden *system* directory
|
||||
on Vista.)
|
||||
Win 7: Hidden, but writeable on Win 7:
|
||||
C:\ProgramData\<AppName>\
|
||||
"""
|
||||
if WINDOWS:
|
||||
path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
|
||||
pathlist = [os.path.join(path, appname)]
|
||||
elif sys.platform == 'darwin':
|
||||
pathlist = [os.path.join('/Library/Application Support', appname)]
|
||||
else:
|
||||
# try looking in $XDG_CONFIG_DIRS
|
||||
xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
|
||||
if xdg_config_dirs:
|
||||
pathlist = [
|
||||
os.path.join(expanduser(x), appname)
|
||||
for x in xdg_config_dirs.split(os.pathsep)
|
||||
]
|
||||
else:
|
||||
pathlist = []
|
||||
|
||||
dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)
|
||||
if _appdirs.system not in ["win32", "darwin"]:
|
||||
# always look in /etc directly as well
|
||||
pathlist.append('/etc')
|
||||
|
||||
return pathlist
|
||||
|
||||
|
||||
# -- Windows support functions --
|
||||
|
||||
def _get_win_folder_from_registry(csidl_name):
|
||||
# type: (str) -> str
|
||||
"""
|
||||
This is a fallback technique at best. I'm not sure if using the
|
||||
registry for this guarantees us the correct answer for all CSIDL_*
|
||||
names.
|
||||
"""
|
||||
import _winreg
|
||||
|
||||
shell_folder_name = {
|
||||
"CSIDL_APPDATA": "AppData",
|
||||
"CSIDL_COMMON_APPDATA": "Common AppData",
|
||||
"CSIDL_LOCAL_APPDATA": "Local AppData",
|
||||
}[csidl_name]
|
||||
|
||||
key = _winreg.OpenKey(
|
||||
_winreg.HKEY_CURRENT_USER,
|
||||
r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
|
||||
)
|
||||
directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
|
||||
return directory
|
||||
|
||||
|
||||
def _get_win_folder_with_ctypes(csidl_name):
|
||||
# type: (str) -> str
|
||||
csidl_const = {
|
||||
"CSIDL_APPDATA": 26,
|
||||
"CSIDL_COMMON_APPDATA": 35,
|
||||
"CSIDL_LOCAL_APPDATA": 28,
|
||||
}[csidl_name]
|
||||
|
||||
buf = ctypes.create_unicode_buffer(1024)
|
||||
ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
|
||||
|
||||
# Downgrade to short path name if have highbit chars. See
|
||||
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
|
||||
has_high_char = False
|
||||
for c in buf:
|
||||
if ord(c) > 255:
|
||||
has_high_char = True
|
||||
break
|
||||
if has_high_char:
|
||||
buf2 = ctypes.create_unicode_buffer(1024)
|
||||
if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
|
||||
buf = buf2
|
||||
|
||||
return buf.value
|
||||
|
||||
|
||||
if WINDOWS:
|
||||
try:
|
||||
import ctypes
|
||||
_get_win_folder = _get_win_folder_with_ctypes
|
||||
except ImportError:
|
||||
_get_win_folder = _get_win_folder_from_registry
|
||||
|
||||
|
||||
def _win_path_to_bytes(path):
|
||||
"""Encode Windows paths to bytes. Only used on Python 2.
|
||||
|
||||
Motivation is to be consistent with other operating systems where paths
|
||||
are also returned as bytes. This avoids problems mixing bytes and Unicode
|
||||
elsewhere in the codebase. For more details and discussion see
|
||||
<https://github.com/pypa/pip/issues/3463>.
|
||||
|
||||
If encoding using ASCII and MBCS fails, return the original Unicode path.
|
||||
"""
|
||||
for encoding in ('ASCII', 'MBCS'):
|
||||
try:
|
||||
return path.encode(encoding)
|
||||
except (UnicodeEncodeError, LookupError):
|
||||
pass
|
||||
return path
|
||||
return dirval.split(os.pathsep) + ['/etc']
|
||||
return [dirval]
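Note: this hunk replaces pip's hand-rolled per-platform logic with thin delegation to the vendored appdirs module. Below is a minimal sketch of the same delegation pattern; it assumes the standalone appdirs package from PyPI is installed, whereas pip imports its vendored copy as pip._vendor.appdirs.

# Sketch of the wrapper pattern introduced here: delegate to appdirs with
# appauthor=False so only the application name appears in the path.
# Assumes the third-party "appdirs" package is installed.
import os

import appdirs as _appdirs


def user_cache_dir(appname):
    # type: (str) -> str
    return _appdirs.user_cache_dir(appname, appauthor=False)


def user_config_dir(appname, roaming=True):
    # type: (str, bool) -> str
    path = _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)
    # appdirs returns ~/Library/Application Support/<app> on macOS; fall back
    # to ~/.config/<app> when that directory does not exist, as the hunk does.
    if _appdirs.system == "darwin" and not os.path.isdir(path):
        path = os.path.expanduser('~/.config/')
        if appname:
            path = os.path.join(path, appname)
    return path


def site_config_dirs(appname):
    # type: (str) -> list
    dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)
    if _appdirs.system not in ["win32", "darwin"]:
        # multipath=True joins $XDG_CONFIG_DIRS entries with os.pathsep;
        # /etc is appended explicitly for backwards compatibility.
        return dirval.split(os.pathsep) + ['/etc']
    return [dirval]


if __name__ == "__main__":
    print(user_cache_dir("pip"))
    print(site_config_dirs("pip"))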
|
||||
|
||||
@@ -1,29 +1,33 @@
|
||||
"""Stuff that differs in different Python versions and platform
|
||||
distributions."""
|
||||
|
||||
# The following comment should be removed at some point in the future.
|
||||
# mypy: disallow-untyped-defs=False
|
||||
|
||||
from __future__ import absolute_import, division
|
||||
|
||||
import codecs
|
||||
import functools
|
||||
import locale
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
from pip._vendor.six import text_type
|
||||
from pip._vendor.urllib3.util import IS_PYOPENSSL
|
||||
from pip._vendor.six import PY2, text_type
|
||||
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from typing import Optional, Text, Tuple, Union
|
||||
from typing import Callable, Optional, Protocol, Text, Tuple, TypeVar, Union
|
||||
|
||||
try:
|
||||
import _ssl # noqa
|
||||
except ImportError:
|
||||
ssl = None
|
||||
else:
|
||||
# This additional assignment was needed to prevent a mypy error.
|
||||
ssl = _ssl
|
||||
# Used in the @lru_cache polyfill.
|
||||
F = TypeVar('F')
|
||||
|
||||
class LruCache(Protocol):
|
||||
def __call__(self, maxsize=None):
|
||||
# type: (Optional[int]) -> Callable[[F], F]
|
||||
raise NotImplementedError
|
||||
|
||||
try:
|
||||
import ipaddress
|
||||
@@ -37,20 +41,14 @@ except ImportError:
|
||||
|
||||
|
||||
__all__ = [
|
||||
"ipaddress", "uses_pycache", "console_to_str", "native_str",
|
||||
"ipaddress", "uses_pycache", "console_to_str",
|
||||
"get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size",
|
||||
"get_extension_suffixes",
|
||||
]
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
HAS_TLS = (ssl is not None) or IS_PYOPENSSL
|
||||
|
||||
if sys.version_info >= (3, 4):
|
||||
uses_pycache = True
|
||||
from importlib.util import cache_from_source
|
||||
else:
|
||||
if PY2:
|
||||
import imp
|
||||
|
||||
try:
|
||||
@@ -60,27 +58,41 @@ else:
|
||||
cache_from_source = None
|
||||
|
||||
uses_pycache = cache_from_source is not None
|
||||
|
||||
|
||||
if sys.version_info >= (3, 5):
|
||||
backslashreplace_decode = "backslashreplace"
|
||||
else:
|
||||
# In version 3.4 and older, backslashreplace exists
|
||||
uses_pycache = True
|
||||
from importlib.util import cache_from_source
|
||||
|
||||
|
||||
if PY2:
|
||||
# In Python 2.7, backslashreplace exists
|
||||
# but does not support use for decoding.
|
||||
# We implement our own replace handler for this
|
||||
# situation, so that we can consistently use
|
||||
# backslash replacement for all versions.
|
||||
def backslashreplace_decode_fn(err):
|
||||
raw_bytes = (err.object[i] for i in range(err.start, err.end))
|
||||
if sys.version_info[0] == 2:
|
||||
# Python 2 gave us characters - convert to numeric bytes
|
||||
raw_bytes = (ord(b) for b in raw_bytes)
|
||||
return u"".join(u"\\x%x" % c for c in raw_bytes), err.end
|
||||
# Python 2 gave us characters - convert to numeric bytes
|
||||
raw_bytes = (ord(b) for b in raw_bytes)
|
||||
return u"".join(map(u"\\x{:x}".format, raw_bytes)), err.end
|
||||
codecs.register_error(
|
||||
"backslashreplace_decode",
|
||||
backslashreplace_decode_fn,
|
||||
)
|
||||
backslashreplace_decode = "backslashreplace_decode"
|
||||
else:
|
||||
backslashreplace_decode = "backslashreplace"
|
||||
|
||||
|
||||
def has_tls():
|
||||
# type: () -> bool
|
||||
try:
|
||||
import _ssl # noqa: F401 # ignore unused
|
||||
return True
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
from pip._vendor.urllib3.util import IS_PYOPENSSL
|
||||
return IS_PYOPENSSL
|
||||
|
||||
|
||||
def str_to_display(data, desc=None):
|
||||
@@ -118,10 +130,11 @@ def str_to_display(data, desc=None):
|
||||
try:
|
||||
decoded_data = data.decode(encoding)
|
||||
except UnicodeDecodeError:
|
||||
if desc is None:
|
||||
desc = 'Bytes object'
|
||||
msg_format = '{} does not appear to be encoded as %s'.format(desc)
|
||||
logger.warning(msg_format, encoding)
|
||||
logger.warning(
|
||||
'%s does not appear to be encoded as %s',
|
||||
desc or 'Bytes object',
|
||||
encoding,
|
||||
)
|
||||
decoded_data = data.decode(encoding, errors=backslashreplace_decode)
|
||||
|
||||
# Make sure we can print the output, by encoding it to the output
|
||||
@@ -156,22 +169,6 @@ def console_to_str(data):
|
||||
return str_to_display(data, desc='Subprocess output')
|
||||
|
||||
|
||||
if sys.version_info >= (3,):
|
||||
def native_str(s, replace=False):
|
||||
# type: (str, bool) -> str
|
||||
if isinstance(s, bytes):
|
||||
return s.decode('utf-8', 'replace' if replace else 'strict')
|
||||
return s
|
||||
|
||||
else:
|
||||
def native_str(s, replace=False):
|
||||
# type: (str, bool) -> str
|
||||
# Replace is ignored -- unicode to UTF-8 can't fail
|
||||
if isinstance(s, text_type):
|
||||
return s.encode('utf-8')
|
||||
return s
|
||||
|
||||
|
||||
def get_path_uid(path):
|
||||
# type: (str) -> int
|
||||
"""
|
||||
@@ -197,23 +194,12 @@ def get_path_uid(path):
|
||||
else:
|
||||
# raise OSError for parity with os.O_NOFOLLOW above
|
||||
raise OSError(
|
||||
"%s is a symlink; Will not return uid for symlinks" % path
|
||||
"{} is a symlink; Will not return uid for symlinks".format(
|
||||
path)
|
||||
)
|
||||
return file_uid
|
||||
|
||||
|
||||
if sys.version_info >= (3, 4):
|
||||
from importlib.machinery import EXTENSION_SUFFIXES
|
||||
|
||||
def get_extension_suffixes():
|
||||
return EXTENSION_SUFFIXES
|
||||
else:
|
||||
from imp import get_suffixes
|
||||
|
||||
def get_extension_suffixes():
|
||||
return [suffix[0] for suffix in get_suffixes()]
|
||||
|
||||
|
||||
def expanduser(path):
|
||||
# type: (str) -> str
|
||||
"""
|
||||
@@ -282,12 +268,26 @@ else:
|
||||
return cr
|
||||
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
|
||||
if not cr:
|
||||
try:
|
||||
fd = os.open(os.ctermid(), os.O_RDONLY)
|
||||
cr = ioctl_GWINSZ(fd)
|
||||
os.close(fd)
|
||||
except Exception:
|
||||
pass
|
||||
if sys.platform != "win32":
|
||||
try:
|
||||
fd = os.open(os.ctermid(), os.O_RDONLY)
|
||||
cr = ioctl_GWINSZ(fd)
|
||||
os.close(fd)
|
||||
except Exception:
|
||||
pass
|
||||
if not cr:
|
||||
cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
|
||||
return int(cr[1]), int(cr[0])
|
||||
|
||||
|
||||
# Fallback to noop_lru_cache in Python 2
|
||||
# TODO: this can be removed when python 2 support is dropped!
|
||||
def noop_lru_cache(maxsize=None):
|
||||
# type: (Optional[int]) -> Callable[[F], F]
|
||||
def _wrapper(f):
|
||||
# type: (F) -> F
|
||||
return f
|
||||
return _wrapper
|
||||
|
||||
|
||||
lru_cache = getattr(functools, "lru_cache", noop_lru_cache) # type: LruCache
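Note: the polyfill falls back to a no-op decorator on interpreters without functools.lru_cache (Python 2). A brief sketch of how the fallback behaves when the decorated function is called:

# Sketch of the lru_cache fallback pattern from this hunk. On Python 3 the
# real functools.lru_cache is picked up; otherwise the no-op decorator
# simply returns the function unchanged.
import functools


def noop_lru_cache(maxsize=None):
    def _wrapper(f):
        return f
    return _wrapper


lru_cache = getattr(functools, "lru_cache", noop_lru_cache)


@lru_cache(maxsize=None)
def fib(n):
    # Recursive Fibonacci: exponential without caching, linear with it.
    return n if n < 2 else fib(n - 1) + fib(n - 2)


if __name__ == "__main__":
    print(fib(60))  # fast when the real lru_cache is available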
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
"""
|
||||
A module that implements tooling to enable easy warnings about deprecations.
|
||||
"""
|
||||
|
||||
# The following comment should be removed at some point in the future.
|
||||
# mypy: disallow-untyped-defs=False
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
|
||||
@@ -32,7 +32,9 @@ def auto_decode(data):
|
||||
# Lets check the first two lines as in PEP263
|
||||
for line in data.split(b'\n')[:2]:
|
||||
if line[0:1] == b'#' and ENCODING_RE.search(line):
|
||||
encoding = ENCODING_RE.search(line).groups()[0].decode('ascii')
|
||||
result = ENCODING_RE.search(line)
|
||||
assert result is not None
|
||||
encoding = result.groups()[0].decode('ascii')
|
||||
return data.decode(encoding)
|
||||
return data.decode(
|
||||
locale.getpreferredencoding(False) or sys.getdefaultencoding(),
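Note: this hunk in auto_decode() tightens the PEP 263 handling: the regex result is captured once and asserted non-None before use, instead of calling search() twice. A standalone sketch of the same detection follows; the regex below is a simplified stand-in, not pip's exact ENCODING_RE.

# Simplified sketch of PEP 263 coding-cookie detection, mirroring the
# "search once, assert, then use" structure of the hunk.
import re

ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')  # approximation


def detect_declared_encoding(data):
    # type: (bytes) -> str
    # PEP 263 only allows the declaration on the first two lines.
    for line in data.split(b'\n')[:2]:
        if line[0:1] == b'#' and ENCODING_RE.search(line):
            result = ENCODING_RE.search(line)
            assert result is not None
            return result.groups()[0].decode('ascii')
    return 'utf-8'  # fallback chosen for this sketch


if __name__ == "__main__":
    source = b"#!/usr/bin/env python\n# -*- coding: latin-1 -*-\nx = 1\n"
    print(detect_declared_encoding(source))  # latin-1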
|
||||
|
||||
@@ -1,16 +1,42 @@
|
||||
import errno
|
||||
import fnmatch
|
||||
import os
|
||||
import os.path
|
||||
import random
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
||||
# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is
|
||||
# why we ignore the type on this import.
|
||||
from pip._vendor.retrying import retry # type: ignore
|
||||
from pip._vendor.six import PY2
|
||||
|
||||
from pip._internal.utils.compat import get_path_uid
|
||||
from pip._internal.utils.misc import format_size
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from typing import Any, BinaryIO, Iterator, List, Union
|
||||
|
||||
class NamedTemporaryFileResult(BinaryIO):
|
||||
@property
|
||||
def file(self):
|
||||
# type: () -> BinaryIO
|
||||
pass
|
||||
|
||||
|
||||
def check_path_owner(path):
|
||||
# type: (str) -> bool
|
||||
# If we don't have a way to check the effective uid of this process, then
|
||||
# we'll just assume that we own the directory.
|
||||
if not hasattr(os, "geteuid"):
|
||||
if sys.platform == "win32" or not hasattr(os, "geteuid"):
|
||||
return True
|
||||
|
||||
assert os.path.isabs(path)
|
||||
|
||||
previous = None
|
||||
while path != previous:
|
||||
if os.path.lexists(path):
|
||||
@@ -28,3 +54,171 @@ def check_path_owner(path):
|
||||
else:
|
||||
previous, path = path, os.path.dirname(path)
|
||||
return False # assume we don't own the path
|
||||
|
||||
|
||||
def copy2_fixed(src, dest):
|
||||
# type: (str, str) -> None
|
||||
"""Wrap shutil.copy2() but map errors copying socket files to
|
||||
SpecialFileError as expected.
|
||||
|
||||
See also https://bugs.python.org/issue37700.
|
||||
"""
|
||||
try:
|
||||
shutil.copy2(src, dest)
|
||||
except (OSError, IOError):
|
||||
for f in [src, dest]:
|
||||
try:
|
||||
is_socket_file = is_socket(f)
|
||||
except OSError:
|
||||
# An error has already occurred. Another error here is not
|
||||
# a problem and we can ignore it.
|
||||
pass
|
||||
else:
|
||||
if is_socket_file:
|
||||
raise shutil.SpecialFileError(
|
||||
"`{f}` is a socket".format(**locals()))
|
||||
|
||||
raise
|
||||
|
||||
|
||||
def is_socket(path):
|
||||
# type: (str) -> bool
|
||||
return stat.S_ISSOCK(os.lstat(path).st_mode)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def adjacent_tmp_file(path, **kwargs):
|
||||
# type: (str, **Any) -> Iterator[NamedTemporaryFileResult]
|
||||
"""Return a file-like object pointing to a tmp file next to path.
|
||||
|
||||
The file is created securely and is ensured to be written to disk
|
||||
after the context reaches its end.
|
||||
|
||||
kwargs will be passed to tempfile.NamedTemporaryFile to control
|
||||
the way the temporary file will be opened.
|
||||
"""
|
||||
with NamedTemporaryFile(
|
||||
delete=False,
|
||||
dir=os.path.dirname(path),
|
||||
prefix=os.path.basename(path),
|
||||
suffix='.tmp',
|
||||
**kwargs
|
||||
) as f:
|
||||
result = cast('NamedTemporaryFileResult', f)
|
||||
try:
|
||||
yield result
|
||||
finally:
|
||||
result.file.flush()
|
||||
os.fsync(result.file.fileno())
|
||||
|
||||
|
||||
_replace_retry = retry(stop_max_delay=1000, wait_fixed=250)
|
||||
|
||||
if PY2:
|
||||
@_replace_retry
|
||||
def replace(src, dest):
|
||||
# type: (str, str) -> None
|
||||
try:
|
||||
os.rename(src, dest)
|
||||
except OSError:
|
||||
os.remove(dest)
|
||||
os.rename(src, dest)
|
||||
|
||||
else:
|
||||
replace = _replace_retry(os.replace)
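Note: together, adjacent_tmp_file() and the retrying replace() give an atomic-write primitive: data is written to a temporary file in the same directory, fsync'd, and then renamed over the target. A hedged sketch of that pattern using only the standard library (no retry wrapper) follows.

# Standard-library sketch of the atomic-write pattern built from
# adjacent_tmp_file() + replace() in this hunk. pip adds a retry wrapper
# because os.replace can transiently fail on Windows.
import os
from tempfile import NamedTemporaryFile


def atomic_write_bytes(path, data):
    # type: (str, bytes) -> None
    # Create the temp file next to the destination so the final rename
    # stays on the same filesystem (a requirement for atomic os.replace).
    with NamedTemporaryFile(
        delete=False,
        dir=os.path.dirname(path) or ".",
        prefix=os.path.basename(path),
        suffix=".tmp",
    ) as f:
        f.write(data)
        f.flush()
        os.fsync(f.fileno())
    os.replace(f.name, path)


if __name__ == "__main__":
    atomic_write_bytes("example.txt", b"hello\n")
    print(open("example.txt", "rb").read())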
|
||||
|
||||
|
||||
# test_writable_dir and _test_writable_dir_win are copied from Flit,
|
||||
# with the author's agreement to also place them under pip's license.
|
||||
def test_writable_dir(path):
|
||||
# type: (str) -> bool
|
||||
"""Check if a directory is writable.
|
||||
|
||||
Uses os.access() on POSIX, tries creating files on Windows.
|
||||
"""
|
||||
# If the directory doesn't exist, find the closest parent that does.
|
||||
while not os.path.isdir(path):
|
||||
parent = os.path.dirname(path)
|
||||
if parent == path:
|
||||
break # Should never get here, but infinite loops are bad
|
||||
path = parent
|
||||
|
||||
if os.name == 'posix':
|
||||
return os.access(path, os.W_OK)
|
||||
|
||||
return _test_writable_dir_win(path)
|
||||
|
||||
|
||||
def _test_writable_dir_win(path):
|
||||
# type: (str) -> bool
|
||||
# os.access doesn't work on Windows: http://bugs.python.org/issue2528
|
||||
# and we can't use tempfile: http://bugs.python.org/issue22107
|
||||
basename = 'accesstest_deleteme_fishfingers_custard_'
|
||||
alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789'
|
||||
for _ in range(10):
|
||||
name = basename + ''.join(random.choice(alphabet) for _ in range(6))
|
||||
file = os.path.join(path, name)
|
||||
try:
|
||||
fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL)
|
||||
# Python 2 doesn't support FileExistsError and PermissionError.
|
||||
except OSError as e:
|
||||
# exception FileExistsError
|
||||
if e.errno == errno.EEXIST:
|
||||
continue
|
||||
# exception PermissionError
|
||||
if e.errno == errno.EPERM or e.errno == errno.EACCES:
|
||||
# This could be because there's a directory with the same name.
|
||||
# But it's highly unlikely there's a directory called that,
|
||||
# so we'll assume it's because the parent dir is not writable.
|
||||
# This could as well be because the parent dir is not readable,
|
||||
# due to non-privileged user access.
|
||||
return False
|
||||
raise
|
||||
else:
|
||||
os.close(fd)
|
||||
os.unlink(file)
|
||||
return True
|
||||
|
||||
# This should never be reached
|
||||
raise EnvironmentError(
|
||||
'Unexpected condition testing for writable directory'
|
||||
)
|
||||
|
||||
|
||||
def find_files(path, pattern):
|
||||
# type: (str, str) -> List[str]
|
||||
"""Returns a list of absolute paths of files beneath path, recursively,
|
||||
with filenames which match the UNIX-style shell glob pattern."""
|
||||
result = [] # type: List[str]
|
||||
for root, _, files in os.walk(path):
|
||||
matches = fnmatch.filter(files, pattern)
|
||||
result.extend(os.path.join(root, f) for f in matches)
|
||||
return result
|
||||
|
||||
|
||||
def file_size(path):
|
||||
# type: (str) -> Union[int, float]
|
||||
# If it's a symlink, return 0.
|
||||
if os.path.islink(path):
|
||||
return 0
|
||||
return os.path.getsize(path)
|
||||
|
||||
|
||||
def format_file_size(path):
|
||||
# type: (str) -> str
|
||||
return format_size(file_size(path))
|
||||
|
||||
|
||||
def directory_size(path):
|
||||
# type: (str) -> Union[int, float]
|
||||
size = 0.0
|
||||
for root, _dirs, files in os.walk(path):
|
||||
for filename in files:
|
||||
file_path = os.path.join(root, filename)
|
||||
size += file_size(file_path)
|
||||
return size
|
||||
|
||||
|
||||
def format_directory_size(path):
|
||||
# type: (str) -> str
|
||||
return format_size(directory_size(path))
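Note: directory_size() walks the tree and sums file sizes, counting symlinks as 0 so a link does not double-count its target. A compact sketch of the same traversal:

# Sketch of the directory_size() logic shown above: walk the tree, skip
# symlinks, and sum the sizes of regular files.
import os


def directory_size(path):
    # type: (str) -> float
    size = 0.0
    for root, _dirs, files in os.walk(path):
        for filename in files:
            file_path = os.path.join(root, filename)
            if os.path.islink(file_path):
                continue  # count links as 0, like file_size() does
            size += os.path.getsize(file_path)
    return size


if __name__ == "__main__":
    print(directory_size("."), "bytes")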
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
# The following comment should be removed at some point in the future.
|
||||
# mypy: strict-optional=False
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import re
|
||||
import warnings
|
||||
import sys
|
||||
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
|
||||
@@ -23,6 +25,8 @@ def glibc_version_string_confstr():
|
||||
# to be broken or missing. This strategy is used in the standard library
|
||||
# platform module:
|
||||
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
|
||||
if sys.platform == "win32":
|
||||
return None
|
||||
try:
|
||||
# os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
|
||||
_, version = os.confstr("CS_GNU_LIBC_VERSION").split()
|
||||
@@ -63,32 +67,6 @@ def glibc_version_string_ctypes():
|
||||
return version_str
|
||||
|
||||
|
||||
# Separated out from have_compatible_glibc for easier unit testing
|
||||
def check_glibc_version(version_str, required_major, minimum_minor):
|
||||
# type: (str, int, int) -> bool
|
||||
# Parse string and check against requested version.
|
||||
#
|
||||
# We use a regexp instead of str.split because we want to discard any
|
||||
# random junk that might come after the minor version -- this might happen
|
||||
# in patched/forked versions of glibc (e.g. Linaro's version of glibc
|
||||
# uses version strings like "2.20-2014.11"). See gh-3588.
|
||||
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
|
||||
if not m:
|
||||
warnings.warn("Expected glibc version with 2 components major.minor,"
|
||||
" got: %s" % version_str, RuntimeWarning)
|
||||
return False
|
||||
return (int(m.group("major")) == required_major and
|
||||
int(m.group("minor")) >= minimum_minor)
|
||||
|
||||
|
||||
def have_compatible_glibc(required_major, minimum_minor):
|
||||
# type: (int, int) -> bool
|
||||
version_str = glibc_version_string()
|
||||
if version_str is None:
|
||||
return False
|
||||
return check_glibc_version(version_str, required_major, minimum_minor)
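Note: check_glibc_version() uses a regex rather than str.split() so that vendor suffixes such as "2.20-2014.11" still parse. A quick demonstration of that parsing:

# Demonstration of the regex-based glibc version parse used above; the
# pattern tolerates junk after the minor component (e.g. Linaro's
# "2.20-2014.11" strings mentioned in the comment).
import re


def check_glibc_version(version_str, required_major, minimum_minor):
    # type: (str, int, int) -> bool
    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if not m:
        return False
    return (int(m.group("major")) == required_major and
            int(m.group("minor")) >= minimum_minor)


if __name__ == "__main__":
    print(check_glibc_version("2.20-2014.11", 2, 17))  # True
    print(check_glibc_version("2.5", 2, 17))           # False
    print(check_glibc_version("garbage", 2, 17))       # False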
|
||||
|
||||
|
||||
# platform.libc_ver regularly returns completely nonsensical glibc
|
||||
# versions. E.g. on my computer, platform says:
|
||||
#
|
||||
|
||||
@@ -5,7 +5,9 @@ import hashlib
|
||||
from pip._vendor.six import iteritems, iterkeys, itervalues
|
||||
|
||||
from pip._internal.exceptions import (
|
||||
HashMismatch, HashMissing, InstallationError,
|
||||
HashMismatch,
|
||||
HashMissing,
|
||||
InstallationError,
|
||||
)
|
||||
from pip._internal.utils.misc import read_chunks
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
@@ -44,6 +46,26 @@ class Hashes(object):
|
||||
"""
|
||||
self._allowed = {} if hashes is None else hashes
|
||||
|
||||
def __and__(self, other):
|
||||
# type: (Hashes) -> Hashes
|
||||
if not isinstance(other, Hashes):
|
||||
return NotImplemented
|
||||
|
||||
# If either of the Hashes object is entirely empty (i.e. no hash
|
||||
# specified at all), all hashes from the other object are allowed.
|
||||
if not other:
|
||||
return self
|
||||
if not self:
|
||||
return other
|
||||
|
||||
# Otherwise only hashes that are present in both objects are allowed.
|
||||
new = {}
|
||||
for alg, values in iteritems(other._allowed):
|
||||
if alg not in self._allowed:
|
||||
continue
|
||||
new[alg] = [v for v in values if v in self._allowed[alg]]
|
||||
return Hashes(new)
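Note: the new __and__ operator intersects two allowed-hash mappings: an empty Hashes object means "no restriction", otherwise only digests present under the same algorithm in both objects survive. A plain-dict sketch of that intersection:

# Plain-dict sketch of the Hashes.__and__ intersection added in this hunk.
# "allowed" maps a hash algorithm name to a list of acceptable hex digests.
def intersect_allowed(a, b):
    # type: (dict, dict) -> dict
    # An empty specification means "anything goes", so the other side wins.
    if not b:
        return a
    if not a:
        return b
    new = {}
    for alg, values in b.items():
        if alg not in a:
            continue
        new[alg] = [v for v in values if v in a[alg]]
    return new


if __name__ == "__main__":
    left = {"sha256": ["aa", "bb"], "md5": ["11"]}
    right = {"sha256": ["bb", "cc"]}
    print(intersect_allowed(left, right))  # {'sha256': ['bb']}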
|
||||
|
||||
@property
|
||||
def digest_count(self):
|
||||
# type: () -> int
|
||||
@@ -54,6 +76,7 @@ class Hashes(object):
|
||||
hash_name, # type: str
|
||||
hex_digest, # type: str
|
||||
):
|
||||
# type: (...) -> bool
|
||||
"""Return whether the given hex digest is allowed."""
|
||||
return hex_digest in self._allowed.get(hash_name, [])
|
||||
|
||||
@@ -70,7 +93,9 @@ class Hashes(object):
|
||||
try:
|
||||
gots[hash_name] = hashlib.new(hash_name)
|
||||
except (ValueError, TypeError):
|
||||
raise InstallationError('Unknown hash name: %s' % hash_name)
|
||||
raise InstallationError(
|
||||
'Unknown hash name: {}'.format(hash_name)
|
||||
)
|
||||
|
||||
for chunk in chunks:
|
||||
for hash in itervalues(gots):
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
# The following comment should be removed at some point in the future.
|
||||
# mypy: disallow-untyped-defs=False
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import contextlib
|
||||
@@ -6,13 +9,13 @@ import logging
|
||||
import logging.handlers
|
||||
import os
|
||||
import sys
|
||||
from logging import Filter
|
||||
from logging import Filter, getLogger
|
||||
|
||||
from pip._vendor.six import PY2
|
||||
|
||||
from pip._internal.utils.compat import WINDOWS
|
||||
from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
|
||||
from pip._internal.utils.misc import ensure_dir, subprocess_logger
|
||||
from pip._internal.utils.misc import ensure_dir
|
||||
|
||||
try:
|
||||
import threading
|
||||
@@ -49,7 +52,7 @@ else:
|
||||
|
||||
|
||||
_log_state = threading.local()
|
||||
_log_state.indentation = 0
|
||||
subprocess_logger = getLogger('pip.subprocessor')
|
||||
|
||||
|
||||
class BrokenStdoutLoggingError(Exception):
|
||||
@@ -100,6 +103,8 @@ def indent_log(num=2):
|
||||
A context manager which will cause the log output to be indented for any
|
||||
log messages emitted inside it.
|
||||
"""
|
||||
# For thread-safety
|
||||
_log_state.indentation = get_indentation()
|
||||
_log_state.indentation += num
|
||||
try:
|
||||
yield
|
||||
@@ -152,7 +157,7 @@ class IndentingFormatter(logging.Formatter):
|
||||
if self.add_timestamp:
|
||||
# TODO: Use Formatter.default_time_format after dropping PY2.
|
||||
t = self.formatTime(record, "%Y-%m-%dT%H:%M:%S")
|
||||
prefix = '%s,%03d ' % (t, record.msecs)
|
||||
prefix = '{t},{record.msecs:03.0f} '.format(**locals())
|
||||
prefix += " " * get_indentation()
|
||||
formatted = "".join([
|
||||
prefix + line
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
import os.path
|
||||
|
||||
DELETE_MARKER_MESSAGE = '''\
|
||||
This file is placed here by pip to indicate the source was put
|
||||
here by pip.
|
||||
|
||||
Once this package is successfully installed this source code will be
|
||||
deleted (unless you remove this file).
|
||||
'''
|
||||
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
|
||||
|
||||
|
||||
def write_delete_marker_file(directory):
|
||||
# type: (str) -> None
|
||||
"""
|
||||
Write the pip delete marker file into this directory.
|
||||
"""
|
||||
filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
|
||||
with open(filepath, 'w') as marker_fp:
|
||||
marker_fp.write(DELETE_MARKER_MESSAGE)
|
||||
File diff suppressed because it is too large
@@ -1,5 +1,7 @@
|
||||
"""Utilities for defining models
|
||||
"""
|
||||
# The following comment should be removed at some point in the future.
|
||||
# mypy: disallow-untyped-defs=False
|
||||
|
||||
import operator
|
||||
|
||||
@@ -8,6 +10,8 @@ class KeyBasedCompareMixin(object):
|
||||
"""Provides comparison capabilities that is based on a key
|
||||
"""
|
||||
|
||||
__slots__ = ['_compare_key', '_defining_class']
|
||||
|
||||
def __init__(self, key, defining_class):
|
||||
self._compare_key = key
|
||||
self._defining_class = defining_class
|
||||
|
||||
@@ -1,178 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from pip._vendor import lockfile, pkg_resources
|
||||
from pip._vendor.packaging import version as packaging_version
|
||||
|
||||
from pip._internal.cli.cmdoptions import make_search_scope
|
||||
from pip._internal.index import PackageFinder
|
||||
from pip._internal.models.selection_prefs import SelectionPreferences
|
||||
from pip._internal.utils.compat import WINDOWS
|
||||
from pip._internal.utils.filesystem import check_path_owner
|
||||
from pip._internal.utils.misc import ensure_dir, get_installed_version
|
||||
from pip._internal.utils.packaging import get_installer
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
import optparse
|
||||
from typing import Any, Dict
|
||||
from pip._internal.download import PipSession
|
||||
|
||||
|
||||
SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SelfCheckState(object):
|
||||
def __init__(self, cache_dir):
|
||||
# type: (str) -> None
|
||||
self.state = {} # type: Dict[str, Any]
|
||||
self.statefile_path = None
|
||||
|
||||
# Try to load the existing state
|
||||
if cache_dir:
|
||||
self.statefile_path = os.path.join(cache_dir, "selfcheck.json")
|
||||
try:
|
||||
with open(self.statefile_path) as statefile:
|
||||
self.state = json.load(statefile)[sys.prefix]
|
||||
except (IOError, ValueError, KeyError):
|
||||
# Explicitly suppressing exceptions, since we don't want to
|
||||
# error out if the cache file is invalid.
|
||||
pass
|
||||
|
||||
def save(self, pypi_version, current_time):
|
||||
# type: (str, datetime.datetime) -> None
|
||||
# If we do not have a path to cache in, don't bother saving.
|
||||
if not self.statefile_path:
|
||||
return
|
||||
|
||||
# Check to make sure that we own the directory
|
||||
if not check_path_owner(os.path.dirname(self.statefile_path)):
|
||||
return
|
||||
|
||||
# Now that we've ensured the directory is owned by this user, we'll go
|
||||
# ahead and make sure that all our directories are created.
|
||||
ensure_dir(os.path.dirname(self.statefile_path))
|
||||
|
||||
# Attempt to write out our version check file
|
||||
with lockfile.LockFile(self.statefile_path):
|
||||
if os.path.exists(self.statefile_path):
|
||||
with open(self.statefile_path) as statefile:
|
||||
state = json.load(statefile)
|
||||
else:
|
||||
state = {}
|
||||
|
||||
state[sys.prefix] = {
|
||||
"last_check": current_time.strftime(SELFCHECK_DATE_FMT),
|
||||
"pypi_version": pypi_version,
|
||||
}
|
||||
|
||||
with open(self.statefile_path, "w") as statefile:
|
||||
json.dump(state, statefile, sort_keys=True,
|
||||
separators=(",", ":"))
|
||||
|
||||
|
||||
def was_installed_by_pip(pkg):
|
||||
# type: (str) -> bool
|
||||
"""Checks whether pkg was installed by pip
|
||||
|
||||
This is used not to display the upgrade message when pip is in fact
|
||||
installed by system package manager, such as dnf on Fedora.
|
||||
"""
|
||||
try:
|
||||
dist = pkg_resources.get_distribution(pkg)
|
||||
return "pip" == get_installer(dist)
|
||||
except pkg_resources.DistributionNotFound:
|
||||
return False
|
||||
|
||||
|
||||
def pip_version_check(session, options):
|
||||
# type: (PipSession, optparse.Values) -> None
|
||||
"""Check for an update for pip.
|
||||
|
||||
Limit the frequency of checks to once per week. State is stored either in
|
||||
the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
|
||||
of the pip script path.
|
||||
"""
|
||||
installed_version = get_installed_version("pip")
|
||||
if not installed_version:
|
||||
return
|
||||
|
||||
pip_version = packaging_version.parse(installed_version)
|
||||
pypi_version = None
|
||||
|
||||
try:
|
||||
state = SelfCheckState(cache_dir=options.cache_dir)
|
||||
|
||||
current_time = datetime.datetime.utcnow()
|
||||
# Determine if we need to refresh the state
|
||||
if "last_check" in state.state and "pypi_version" in state.state:
|
||||
last_check = datetime.datetime.strptime(
|
||||
state.state["last_check"],
|
||||
SELFCHECK_DATE_FMT
|
||||
)
|
||||
if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
|
||||
pypi_version = state.state["pypi_version"]
|
||||
|
||||
# Refresh the version if we need to or just see if we need to warn
|
||||
if pypi_version is None:
|
||||
# Lets use PackageFinder to see what the latest pip version is
|
||||
search_scope = make_search_scope(options, suppress_no_index=True)
|
||||
|
||||
# Pass allow_yanked=False so we don't suggest upgrading to a
|
||||
# yanked version.
|
||||
selection_prefs = SelectionPreferences(
|
||||
allow_yanked=False,
|
||||
allow_all_prereleases=False, # Explicitly set to False
|
||||
)
|
||||
|
||||
finder = PackageFinder.create(
|
||||
search_scope=search_scope,
|
||||
selection_prefs=selection_prefs,
|
||||
trusted_hosts=options.trusted_hosts,
|
||||
session=session,
|
||||
)
|
||||
candidate = finder.find_candidates("pip").get_best()
|
||||
if candidate is None:
|
||||
return
|
||||
pypi_version = str(candidate.version)
|
||||
|
||||
# save that we've performed a check
|
||||
state.save(pypi_version, current_time)
|
||||
|
||||
remote_version = packaging_version.parse(pypi_version)
|
||||
|
||||
local_version_is_older = (
|
||||
pip_version < remote_version and
|
||||
pip_version.base_version != remote_version.base_version and
|
||||
was_installed_by_pip('pip')
|
||||
)
|
||||
|
||||
# Determine if our pypi_version is older
|
||||
if not local_version_is_older:
|
||||
return
|
||||
|
||||
# Advise "python -m pip" on Windows to avoid issues
|
||||
# with overwriting pip.exe.
|
||||
if WINDOWS:
|
||||
pip_cmd = "python -m pip"
|
||||
else:
|
||||
pip_cmd = "pip"
|
||||
logger.warning(
|
||||
"You are using pip version %s, however version %s is "
|
||||
"available.\nYou should consider upgrading via the "
|
||||
"'%s install --upgrade pip' command.",
|
||||
pip_version, pypi_version, pip_cmd
|
||||
)
|
||||
except Exception:
|
||||
logger.debug(
|
||||
"There was an error checking the latest version of pip",
|
||||
exc_info=True,
|
||||
)
|
||||
@@ -3,7 +3,7 @@ import sys
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from typing import List
|
||||
from typing import List, Optional, Sequence
|
||||
|
||||
# Shim to wrap setup.py invocation with setuptools
|
||||
#
|
||||
@@ -20,17 +20,162 @@ _SETUPTOOLS_SHIM = (
|
||||
)
|
||||
|
||||
|
||||
def make_setuptools_shim_args(setup_py_path, unbuffered_output=False):
|
||||
# type: (str, bool) -> List[str]
|
||||
def make_setuptools_shim_args(
|
||||
setup_py_path, # type: str
|
||||
global_options=None, # type: Sequence[str]
|
||||
no_user_config=False, # type: bool
|
||||
unbuffered_output=False # type: bool
|
||||
):
|
||||
# type: (...) -> List[str]
|
||||
"""
|
||||
Get setuptools command arguments with shim wrapped setup file invocation.
|
||||
|
||||
:param setup_py_path: The path to setup.py to be wrapped.
|
||||
:param global_options: Additional global options.
|
||||
:param no_user_config: If True, disables personal user configuration.
|
||||
:param unbuffered_output: If True, adds the unbuffered switch to the
|
||||
argument list.
|
||||
"""
|
||||
args = [sys.executable]
|
||||
if unbuffered_output:
|
||||
args.append('-u')
|
||||
args.extend(['-c', _SETUPTOOLS_SHIM.format(setup_py_path)])
|
||||
args += ["-u"]
|
||||
args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)]
|
||||
if global_options:
|
||||
args += global_options
|
||||
if no_user_config:
|
||||
args += ["--no-user-cfg"]
|
||||
return args
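Note: make_setuptools_shim_args() now assembles the full interpreter invocation, including -u, the -c shim, any global options, and --no-user-cfg. A hedged sketch of the kind of argument list it produces; the shim string below is a shortened placeholder, not pip's real _SETUPTOOLS_SHIM.

# Sketch of the argument assembly in make_setuptools_shim_args(). The real
# shim is a longer exec() wrapper; a placeholder string is used here.
import sys

_SETUPTOOLS_SHIM = "import setuptools; exec(open({!r}).read())"  # placeholder


def make_setuptools_shim_args(setup_py_path, global_options=None,
                              no_user_config=False, unbuffered_output=False):
    args = [sys.executable]
    if unbuffered_output:
        args += ["-u"]                      # unbuffered stdout/stderr
    args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)]
    if global_options:
        args += list(global_options)
    if no_user_config:
        args += ["--no-user-cfg"]           # ignore ~/.pydistutils.cfg
    return args


if __name__ == "__main__":
    print(make_setuptools_shim_args(
        "pkg/setup.py", global_options=["--verbose"],
        no_user_config=True, unbuffered_output=True,
    ))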
|
||||
|
||||
|
||||
def make_setuptools_bdist_wheel_args(
|
||||
setup_py_path, # type: str
|
||||
global_options, # type: Sequence[str]
|
||||
build_options, # type: Sequence[str]
|
||||
destination_dir, # type: str
|
||||
):
|
||||
# type: (...) -> List[str]
|
||||
# NOTE: Eventually, we'd want to also -S to the flags here, when we're
|
||||
# isolating. Currently, it breaks Python in virtualenvs, because it
|
||||
# relies on site.py to find parts of the standard library outside the
|
||||
# virtualenv.
|
||||
args = make_setuptools_shim_args(
|
||||
setup_py_path,
|
||||
global_options=global_options,
|
||||
unbuffered_output=True
|
||||
)
|
||||
args += ["bdist_wheel", "-d", destination_dir]
|
||||
args += build_options
|
||||
return args
|
||||
|
||||
|
||||
def make_setuptools_clean_args(
|
||||
setup_py_path, # type: str
|
||||
global_options, # type: Sequence[str]
|
||||
):
|
||||
# type: (...) -> List[str]
|
||||
args = make_setuptools_shim_args(
|
||||
setup_py_path,
|
||||
global_options=global_options,
|
||||
unbuffered_output=True
|
||||
)
|
||||
args += ["clean", "--all"]
|
||||
return args
|
||||
|
||||
|
||||
def make_setuptools_develop_args(
|
||||
setup_py_path, # type: str
|
||||
global_options, # type: Sequence[str]
|
||||
install_options, # type: Sequence[str]
|
||||
no_user_config, # type: bool
|
||||
prefix, # type: Optional[str]
|
||||
home, # type: Optional[str]
|
||||
use_user_site, # type: bool
|
||||
):
|
||||
# type: (...) -> List[str]
|
||||
assert not (use_user_site and prefix)
|
||||
|
||||
args = make_setuptools_shim_args(
|
||||
setup_py_path,
|
||||
global_options=global_options,
|
||||
no_user_config=no_user_config,
|
||||
)
|
||||
|
||||
args += ["develop", "--no-deps"]
|
||||
|
||||
args += install_options
|
||||
|
||||
if prefix:
|
||||
args += ["--prefix", prefix]
|
||||
if home is not None:
|
||||
args += ["--home", home]
|
||||
|
||||
if use_user_site:
|
||||
args += ["--user", "--prefix="]
|
||||
|
||||
return args
|
||||
|
||||
|
||||
def make_setuptools_egg_info_args(
|
||||
setup_py_path, # type: str
|
||||
egg_info_dir, # type: Optional[str]
|
||||
no_user_config, # type: bool
|
||||
):
|
||||
# type: (...) -> List[str]
|
||||
args = make_setuptools_shim_args(
|
||||
setup_py_path, no_user_config=no_user_config
|
||||
)
|
||||
|
||||
args += ["egg_info"]
|
||||
|
||||
if egg_info_dir:
|
||||
args += ["--egg-base", egg_info_dir]
|
||||
|
||||
return args
|
||||
|
||||
|
||||
def make_setuptools_install_args(
|
||||
setup_py_path, # type: str
|
||||
global_options, # type: Sequence[str]
|
||||
install_options, # type: Sequence[str]
|
||||
record_filename, # type: str
|
||||
root, # type: Optional[str]
|
||||
prefix, # type: Optional[str]
|
||||
header_dir, # type: Optional[str]
|
||||
home, # type: Optional[str]
|
||||
use_user_site, # type: bool
|
||||
no_user_config, # type: bool
|
||||
pycompile # type: bool
|
||||
):
|
||||
# type: (...) -> List[str]
|
||||
assert not (use_user_site and prefix)
|
||||
assert not (use_user_site and root)
|
||||
|
||||
args = make_setuptools_shim_args(
|
||||
setup_py_path,
|
||||
global_options=global_options,
|
||||
no_user_config=no_user_config,
|
||||
unbuffered_output=True
|
||||
)
|
||||
args += ["install", "--record", record_filename]
|
||||
args += ["--single-version-externally-managed"]
|
||||
|
||||
if root is not None:
|
||||
args += ["--root", root]
|
||||
if prefix is not None:
|
||||
args += ["--prefix", prefix]
|
||||
if home is not None:
|
||||
args += ["--home", home]
|
||||
if use_user_site:
|
||||
args += ["--user", "--prefix="]
|
||||
|
||||
if pycompile:
|
||||
args += ["--compile"]
|
||||
else:
|
||||
args += ["--no-compile"]
|
||||
|
||||
if header_dir:
|
||||
args += ["--install-headers", header_dir]
|
||||
|
||||
args += install_options
|
||||
|
||||
return args
|
||||
|
||||
@@ -5,12 +5,95 @@ import itertools
|
||||
import logging
|
||||
import os.path
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
|
||||
from pip._vendor.contextlib2 import ExitStack
|
||||
from pip._vendor.six import ensure_text
|
||||
|
||||
from pip._internal.utils.misc import enum, rmtree
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from typing import Any, Dict, Iterator, Optional, TypeVar, Union
|
||||
|
||||
_T = TypeVar('_T', bound='TempDirectory')
|
||||
|
||||
from pip._internal.utils.misc import rmtree
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Kinds of temporary directories. Only needed for ones that are
|
||||
# globally-managed.
|
||||
tempdir_kinds = enum(
|
||||
BUILD_ENV="build-env",
|
||||
EPHEM_WHEEL_CACHE="ephem-wheel-cache",
|
||||
REQ_BUILD="req-build",
|
||||
)
|
||||
|
||||
|
||||
_tempdir_manager = None # type: Optional[ExitStack]
|
||||
|
||||
|
||||
@contextmanager
|
||||
def global_tempdir_manager():
|
||||
# type: () -> Iterator[None]
|
||||
global _tempdir_manager
|
||||
with ExitStack() as stack:
|
||||
old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
_tempdir_manager = old_tempdir_manager
|
||||
|
||||
|
||||
class TempDirectoryTypeRegistry(object):
|
||||
"""Manages temp directory behavior
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
# type: () -> None
|
||||
self._should_delete = {} # type: Dict[str, bool]
|
||||
|
||||
def set_delete(self, kind, value):
|
||||
# type: (str, bool) -> None
|
||||
"""Indicate whether a TempDirectory of the given kind should be
|
||||
auto-deleted.
|
||||
"""
|
||||
self._should_delete[kind] = value
|
||||
|
||||
def get_delete(self, kind):
|
||||
# type: (str) -> bool
|
||||
"""Get configured auto-delete flag for a given TempDirectory type,
|
||||
default True.
|
||||
"""
|
||||
return self._should_delete.get(kind, True)
|
||||
|
||||
|
||||
_tempdir_registry = None # type: Optional[TempDirectoryTypeRegistry]
|
||||
|
||||
|
||||
@contextmanager
|
||||
def tempdir_registry():
|
||||
# type: () -> Iterator[TempDirectoryTypeRegistry]
|
||||
"""Provides a scoped global tempdir registry that can be used to dictate
|
||||
whether directories should be deleted.
|
||||
"""
|
||||
global _tempdir_registry
|
||||
old_tempdir_registry = _tempdir_registry
|
||||
_tempdir_registry = TempDirectoryTypeRegistry()
|
||||
try:
|
||||
yield _tempdir_registry
|
||||
finally:
|
||||
_tempdir_registry = old_tempdir_registry
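Note: tempdir_registry() and global_tempdir_manager() both use the same pattern: a context manager swaps a module-level global in, yields it, and restores the previous value on exit, so the "global" is scoped to the with-block. A generic sketch of that pattern:

# Generic sketch of the scoped-global pattern used by tempdir_registry()
# and global_tempdir_manager() in this hunk.
from contextlib import contextmanager

_registry = None  # module-level "global" that the context manager scopes


@contextmanager
def scoped_registry():
    global _registry
    old, _registry = _registry, {}
    try:
        yield _registry
    finally:
        # Always restore the previous value, even if the body raised.
        _registry = old


if __name__ == "__main__":
    with scoped_registry() as reg:
        reg["build-env"] = False
        print(_registry)   # {'build-env': False}
    print(_registry)       # None again outside the with-block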
|
||||
|
||||
|
||||
class _Default(object):
|
||||
pass
|
||||
|
||||
|
||||
_default = _Default()
|
||||
|
||||
|
||||
class TempDirectory(object):
|
||||
"""Helper class that owns and cleans up a temporary directory.
|
||||
|
||||
@@ -19,69 +102,101 @@ class TempDirectory(object):
|
||||
|
||||
Attributes:
|
||||
path
|
||||
Location to the created temporary directory or None
|
||||
Location to the created temporary directory
|
||||
delete
|
||||
Whether the directory should be deleted when exiting
|
||||
(when used as a contextmanager)
|
||||
|
||||
Methods:
|
||||
create()
|
||||
Creates a temporary directory and stores its path in the path
|
||||
attribute.
|
||||
cleanup()
|
||||
Deletes the temporary directory and sets path attribute to None
|
||||
Deletes the temporary directory
|
||||
|
||||
When used as a context manager, a temporary directory is created on
|
||||
entering the context and, if the delete attribute is True, on exiting the
|
||||
context the created directory is deleted.
|
||||
When used as a context manager, if the delete attribute is True, on
|
||||
exiting the context the temporary directory is deleted.
|
||||
"""
|
||||
|
||||
def __init__(self, path=None, delete=None, kind="temp"):
|
||||
def __init__(
|
||||
self,
|
||||
path=None, # type: Optional[str]
|
||||
delete=_default, # type: Union[bool, None, _Default]
|
||||
kind="temp", # type: str
|
||||
globally_managed=False, # type: bool
|
||||
):
|
||||
super(TempDirectory, self).__init__()
|
||||
|
||||
if path is None and delete is None:
|
||||
# If we were not given an explicit directory, and we were not given
|
||||
# an explicit delete option, then we'll default to deleting.
|
||||
delete = True
|
||||
if delete is _default:
|
||||
if path is not None:
|
||||
# If we were given an explicit directory, resolve delete option
|
||||
# now.
|
||||
delete = False
|
||||
else:
|
||||
# Otherwise, we wait until cleanup and see what
|
||||
# tempdir_registry says.
|
||||
delete = None
|
||||
|
||||
self.path = path
|
||||
if path is None:
|
||||
path = self._create(kind)
|
||||
|
||||
self._path = path
|
||||
self._deleted = False
|
||||
self.delete = delete
|
||||
self.kind = kind
|
||||
|
||||
if globally_managed:
|
||||
assert _tempdir_manager is not None
|
||||
_tempdir_manager.enter_context(self)
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
# type: () -> str
|
||||
assert not self._deleted, (
|
||||
"Attempted to access deleted path: {}".format(self._path)
|
||||
)
|
||||
return self._path
|
||||
|
||||
def __repr__(self):
|
||||
# type: () -> str
|
||||
return "<{} {!r}>".format(self.__class__.__name__, self.path)
|
||||
|
||||
def __enter__(self):
|
||||
self.create()
|
||||
# type: (_T) -> _T
|
||||
return self
|
||||
|
||||
def __exit__(self, exc, value, tb):
|
||||
if self.delete:
|
||||
# type: (Any, Any, Any) -> None
|
||||
if self.delete is not None:
|
||||
delete = self.delete
|
||||
elif _tempdir_registry:
|
||||
delete = _tempdir_registry.get_delete(self.kind)
|
||||
else:
|
||||
delete = True
|
||||
|
||||
if delete:
|
||||
self.cleanup()
|
||||
|
||||
def create(self):
|
||||
def _create(self, kind):
|
||||
# type: (str) -> str
|
||||
"""Create a temporary directory and store its path in self.path
|
||||
"""
|
||||
if self.path is not None:
|
||||
logger.debug(
|
||||
"Skipped creation of temporary directory: {}".format(self.path)
|
||||
)
|
||||
return
|
||||
# We realpath here because some systems have their default tmpdir
|
||||
# symlinked to another directory. This tends to confuse build
|
||||
# scripts, so we canonicalize the path by traversing potential
|
||||
# symlinks here.
|
||||
self.path = os.path.realpath(
|
||||
tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
|
||||
path = os.path.realpath(
|
||||
tempfile.mkdtemp(prefix="pip-{}-".format(kind))
|
||||
)
|
||||
logger.debug("Created temporary directory: {}".format(self.path))
|
||||
logger.debug("Created temporary directory: %s", path)
|
||||
return path
|
||||
|
||||
def cleanup(self):
|
||||
# type: () -> None
|
||||
"""Remove the temporary directory created and reset state
|
||||
"""
|
||||
if self.path is not None and os.path.exists(self.path):
|
||||
rmtree(self.path)
|
||||
self.path = None
|
||||
self._deleted = True
|
||||
if os.path.exists(self._path):
|
||||
# Make sure to pass unicode on Python 2 to make the contents also
|
||||
# use unicode, ensuring non-ASCII names can be represented.
|
||||
rmtree(ensure_text(self._path))
|
||||
|
||||
|
||||
class AdjacentTempDirectory(TempDirectory):
|
||||
@@ -106,11 +221,13 @@ class AdjacentTempDirectory(TempDirectory):
|
||||
LEADING_CHARS = "-~.=%0123456789"
|
||||
|
||||
def __init__(self, original, delete=None):
|
||||
super(AdjacentTempDirectory, self).__init__(delete=delete)
|
||||
# type: (str, Optional[bool]) -> None
|
||||
self.original = original.rstrip('/\\')
|
||||
super(AdjacentTempDirectory, self).__init__(delete=delete)
|
||||
|
||||
@classmethod
|
||||
def _generate_names(cls, name):
|
||||
# type: (str) -> Iterator[str]
|
||||
"""Generates a series of temporary names.
|
||||
|
||||
The algorithm replaces the leading characters in the name
|
||||
@@ -133,7 +250,8 @@ class AdjacentTempDirectory(TempDirectory):
|
||||
if new_name != name:
|
||||
yield new_name
|
||||
|
||||
def create(self):
|
||||
def _create(self, kind):
|
||||
# type: (str) -> str
|
||||
root, name = os.path.split(self.original)
|
||||
for candidate in self._generate_names(name):
|
||||
path = os.path.join(root, candidate)
|
||||
@@ -144,12 +262,13 @@ class AdjacentTempDirectory(TempDirectory):
|
||||
if ex.errno != errno.EEXIST:
|
||||
raise
|
||||
else:
|
||||
self.path = os.path.realpath(path)
|
||||
path = os.path.realpath(path)
|
||||
break
|
||||
|
||||
if not self.path:
|
||||
else:
|
||||
# Final fallback on the default behavior.
|
||||
self.path = os.path.realpath(
|
||||
tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
|
||||
path = os.path.realpath(
|
||||
tempfile.mkdtemp(prefix="pip-{}-".format(kind))
|
||||
)
|
||||
logger.debug("Created temporary directory: {}".format(self.path))
|
||||
|
||||
logger.debug("Created temporary directory: %s", path)
|
||||
return path
|
||||
|
||||
@@ -27,3 +27,12 @@ Ref: https://github.com/python/mypy/issues/3216
|
||||
"""
|
||||
|
||||
MYPY_CHECK_RUNNING = False
|
||||
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from typing import cast
|
||||
else:
|
||||
# typing's cast() is needed at runtime, but we don't want to import typing.
|
||||
# Thus, we use a dummy no-op version, which we tell mypy to ignore.
|
||||
def cast(type_, value): # type: ignore
|
||||
return value
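Note: the dummy cast() keeps typing out of the runtime import path: under mypy the real typing.cast is imported, while at runtime a no-op stand-in is used. A small sketch showing that callers see the same behaviour either way:

# Sketch of the MYPY_CHECK_RUNNING / dummy cast() pattern shown above.
MYPY_CHECK_RUNNING = False

if MYPY_CHECK_RUNNING:
    from typing import cast
else:
    def cast(type_, value):  # type: ignore
        # At runtime cast() is purely an identity function; only mypy
        # treats the first argument as the new static type.
        return value


if __name__ == "__main__":
    x = cast("int", "not really an int")
    print(x)  # the value passes through unchanged at runtime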
|
||||
|
||||
@@ -1,424 +0,0 @@
|
||||
from __future__ import absolute_import, division
|
||||
|
||||
import contextlib
|
||||
import itertools
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
from signal import SIGINT, default_int_handler, signal
|
||||
|
||||
from pip._vendor import six
|
||||
from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR
|
||||
from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
|
||||
from pip._vendor.progress.spinner import Spinner
|
||||
|
||||
from pip._internal.utils.compat import WINDOWS
|
||||
from pip._internal.utils.logging import get_indentation
|
||||
from pip._internal.utils.misc import format_size
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from typing import Any, Iterator, IO
|
||||
|
||||
try:
|
||||
from pip._vendor import colorama
|
||||
# Lots of different errors can come from this, including SystemError and
|
||||
# ImportError.
|
||||
except Exception:
|
||||
colorama = None
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _select_progress_class(preferred, fallback):
|
||||
encoding = getattr(preferred.file, "encoding", None)
|
||||
|
||||
# If we don't know what encoding this file is in, then we'll just assume
|
||||
# that it doesn't support unicode and use the ASCII bar.
|
||||
if not encoding:
|
||||
return fallback
|
||||
|
||||
# Collect all of the possible characters we want to use with the preferred
|
||||
# bar.
|
||||
characters = [
|
||||
getattr(preferred, "empty_fill", six.text_type()),
|
||||
getattr(preferred, "fill", six.text_type()),
|
||||
]
|
||||
characters += list(getattr(preferred, "phases", []))
|
||||
|
||||
# Try to decode the characters we're using for the bar using the encoding
|
||||
# of the given file, if this works then we'll assume that we can use the
|
||||
# fancier bar and if not we'll fall back to the plaintext bar.
|
||||
try:
|
||||
six.text_type().join(characters).encode(encoding)
|
||||
except UnicodeEncodeError:
|
||||
return fallback
|
||||
else:
|
||||
return preferred
|
||||
|
||||
|
||||
_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any
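Note: _select_progress_class() picks between the fancy and plain progress bars by test-encoding the bar's characters with the output stream's encoding. A standalone sketch of that capability check (the helper name can_render is made up for this example):

# Standalone sketch of the encoding capability check performed by
# _select_progress_class() in the removed ui module.
import sys


def can_render(characters, stream=sys.stdout):
    # type: (str, object) -> bool
    encoding = getattr(stream, "encoding", None)
    if not encoding:
        # Unknown encoding: assume only ASCII is safe.
        return all(ord(c) < 128 for c in characters)
    try:
        characters.encode(encoding)
    except UnicodeEncodeError:
        return False
    return True


if __name__ == "__main__":
    print(can_render(u"\u2588\u258c"))  # block characters used by fancy bars
    print(can_render("#=-"))            # plain ASCII fallback always works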
|
||||
|
||||
|
||||
class InterruptibleMixin(object):
    """
    Helper to ensure that self.finish() gets called on keyboard interrupt.

    This allows downloads to be interrupted without leaving temporary state
    (like hidden cursors) behind.

    This class is similar to the progress library's existing SigIntMixin
    helper, but as of version 1.2, that helper has the following problems:

    1. It calls sys.exit().
    2. It discards the existing SIGINT handler completely.
    3. It leaves its own handler in place even after an uninterrupted finish,
       which will have unexpected delayed effects if the user triggers an
       unrelated keyboard interrupt some time after a progress-displaying
       download has already completed, for example.
    """

    def __init__(self, *args, **kwargs):
        """
        Save the original SIGINT handler for later.
        """
        super(InterruptibleMixin, self).__init__(*args, **kwargs)

        self.original_handler = signal(SIGINT, self.handle_sigint)

        # If signal() returns None, the previous handler was not installed from
        # Python, and we cannot restore it. This probably should not happen,
        # but if it does, we must restore something sensible instead, at least.
        # The least bad option should be Python's default SIGINT handler, which
        # just raises KeyboardInterrupt.
        if self.original_handler is None:
            self.original_handler = default_int_handler

    def finish(self):
        """
        Restore the original SIGINT handler after finishing.

        This should happen regardless of whether the progress display finishes
        normally, or gets interrupted.
        """
        super(InterruptibleMixin, self).finish()
        signal(SIGINT, self.original_handler)

    def handle_sigint(self, signum, frame):
        """
        Call self.finish() before delegating to the original SIGINT handler.

        This handler should only be in place while the progress display is
        active.
        """
        self.finish()
        self.original_handler(signum, frame)
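The save/restore pattern the mixin relies on can be sketched on its own (illustrative, not part of pip): install a handler that cleans up and then delegates, and always put the previous handler back.

import signal

def _with_sigint_cleanup(cleanup, work):
    def handler(signum, frame):
        # Clean up first, then behave like the handler we replaced.
        cleanup()
        previous(signum, frame)

    previous = signal.signal(signal.SIGINT, handler)
    if previous is None or not callable(previous):
        # A non-Python handler, SIG_DFL, or SIG_IGN cannot be called directly;
        # fall back to the default handler, which raises KeyboardInterrupt.
        previous = signal.default_int_handler
    try:
        return work()
    finally:
        # Restore a sensible handler whether work() finished or raised.
        signal.signal(signal.SIGINT, previous)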
class SilentBar(Bar):

    def update(self):
        pass


class BlueEmojiBar(IncrementalBar):

    suffix = "%(percent)d%%"
    bar_prefix = " "
    bar_suffix = " "
    phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535")  # type: Any


class DownloadProgressMixin(object):

    def __init__(self, *args, **kwargs):
        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
        self.message = (" " * (get_indentation() + 2)) + self.message

    @property
    def downloaded(self):
        return format_size(self.index)

    @property
    def download_speed(self):
        # Avoid zero division errors...
        if self.avg == 0.0:
            return "..."
        return format_size(1 / self.avg) + "/s"

    @property
    def pretty_eta(self):
        if self.eta:
            return "eta %s" % self.eta_td
        return ""

    def iter(self, it, n=1):
        for x in it:
            yield x
            self.next(n)
        self.finish()


class WindowsMixin(object):

    def __init__(self, *args, **kwargs):
        # The Windows terminal does not support the hide/show cursor ANSI codes
        # even with colorama. So we'll ensure that hide_cursor is False on
        # Windows.
        # This call needs to go before the super() call, so that hide_cursor
        # is set in time. The base progress bar class writes the "hide cursor"
        # code to the terminal in its init, so if we don't set this soon
        # enough, we get a "hide" with no corresponding "show"...
        if WINDOWS and self.hide_cursor:
            self.hide_cursor = False

        super(WindowsMixin, self).__init__(*args, **kwargs)

        # Check if we are running on Windows and we have the colorama module,
        # if we do then wrap our file with it.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)
            # The progress code expects to be able to call self.file.isatty()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            # The progress code expects to be able to call self.file.flush()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.flush = lambda: self.file.wrapped.flush()


class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin):

    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"

# NOTE: The "type: ignore" comments on the following classes are there to
#       work around https://github.com/python/typing/issues/241


class DefaultDownloadProgressBar(BaseDownloadProgressBar,
                                 _BaseBar):
    pass


class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):  # type: ignore
    pass


class DownloadBar(BaseDownloadProgressBar,  # type: ignore
                  Bar):
    pass


class DownloadFillingCirclesBar(BaseDownloadProgressBar,  # type: ignore
                                FillingCirclesBar):
    pass


class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar,  # type: ignore
                                   BlueEmojiBar):
    pass


class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin, Spinner):

    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self):
        if not hasattr(self, "_phaser"):
            self._phaser = itertools.cycle(self.phases)
        return next(self._phaser)

    def update(self):
        message = self.message % self
        phase = self.next_phase()
        suffix = self.suffix % self
        line = ''.join([
            message,
            " " if message else "",
            phase,
            " " if suffix else "",
            suffix,
        ])

        self.writeln(line)


BAR_TYPES = {
    "off": (DownloadSilentBar, DownloadSilentBar),
    "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
    "ascii": (DownloadBar, DownloadProgressSpinner),
    "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
}


def DownloadProgressProvider(progress_bar, max=None):
    if max is None or max == 0:
        return BAR_TYPES[progress_bar][1]().iter
    else:
        return BAR_TYPES[progress_bar][0](max=max).iter
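A hedged sketch of how DownloadProgressProvider can be used: it returns the bound iter method of the selected bar, so a caller simply wraps its chunk iterator with it. The chunk generator and sizes below are made up for illustration.

# Sketch only -- assumes an iterable of byte chunks and a known total size.
chunks = (b"\0" * 1024 for _ in range(100))
total_bytes = 100 * 1024

progress_iter = DownloadProgressProvider("on", max=total_bytes)
for chunk in progress_iter(chunks, 1024):
    pass  # write the chunk to disk, feed a hasher, etc.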
################################################################
# Generic "something is happening" spinners
#
# We don't even try using progress.spinner.Spinner here because it's actually
# simpler to reimplement from scratch than to coerce their code into doing
# what we need.
################################################################

@contextlib.contextmanager
def hidden_cursor(file):
    # type: (IO) -> Iterator[None]
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:
        yield
    # We don't want to clutter the output with control characters if we're
    # writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            file.write(SHOW_CURSOR)


class RateLimiter(object):
    def __init__(self, min_update_interval_seconds):
        # type: (float) -> None
        self._min_update_interval_seconds = min_update_interval_seconds
        self._last_update = 0  # type: float

    def ready(self):
        # type: () -> bool
        now = time.time()
        delta = now - self._last_update
        return delta >= self._min_update_interval_seconds

    def reset(self):
        # type: () -> None
        self._last_update = time.time()
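A quick sketch (not from this file) of the throttle pattern RateLimiter implements: work happens on every iteration, but output is only refreshed once per interval.

import time

limiter = RateLimiter(0.5)  # redraw at most twice per second
for step in range(10):
    time.sleep(0.1)          # stand-in for real work
    if limiter.ready():
        print("progress: step %d" % step)
        limiter.reset()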
class SpinnerInterface(object):
    def spin(self):
        # type: () -> None
        raise NotImplementedError()

    def finish(self, final_status):
        # type: (str) -> None
        raise NotImplementedError()


class InteractiveSpinner(SpinnerInterface):
    def __init__(self, message, file=None, spin_chars="-\\|/",
                 # Empirically, 8 updates/second looks nice
                 min_update_interval_seconds=0.125):
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        self._spin_cycle = itertools.cycle(spin_chars)

        self._file.write(" " * get_indentation() + self._message + " ... ")
        self._width = 0

    def _write(self, status):
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self):
        # type: () -> None
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status):
        # type: (str) -> None
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True


# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(SpinnerInterface):
    def __init__(self, message, min_update_interval_seconds=60):
        # type: (str, float) -> None
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        # type: () -> None
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        # type: (str) -> None
        if self._finished:
            return
        self._update("finished with status '%s'" % (final_status,))
        self._finished = True


@contextlib.contextmanager
def open_spinner(message):
    # type: (str) -> Iterator[SpinnerInterface]
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner = InteractiveSpinner(message)  # type: SpinnerInterface
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
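A hedged sketch of how a caller might use open_spinner(): the context manager picks the interactive or logging-based spinner, and the caller only needs to call spin() while it works. The message text and loop body are illustrative only.

with open_spinner("Preparing wheel metadata") as spinner:
    for _ in range(20):
        time.sleep(0.05)  # stand-in for real work
        spinner.spin()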
@@ -1,34 +1,119 @@
from __future__ import absolute_import

import io
import logging
import os
import re
import site
import sys

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional

logger = logging.getLogger(__name__)
_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile(
    r"include-system-site-packages\s*=\s*(?P<value>true|false)"
)


def _running_under_venv():
    # type: () -> bool
    """Checks if sys.base_prefix and sys.prefix match.

    This handles PEP 405 compliant virtual environments.
    """
    return sys.prefix != getattr(sys, "base_prefix", sys.prefix)


def _running_under_regular_virtualenv():
    # type: () -> bool
    """Checks if sys.real_prefix is set.

    This handles virtual environments created with pypa's virtualenv.
    """
    # pypa/virtualenv case
    return hasattr(sys, 'real_prefix')


def running_under_virtualenv():
    # type: () -> bool
    """Return True if we're running inside a virtualenv, False otherwise.
    """
    return _running_under_venv() or _running_under_regular_virtualenv()


def _get_pyvenv_cfg_lines():
    # type: () -> Optional[List[str]]
    """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines

    Returns None, if it could not read/access the file.
    """
    pyvenv_cfg_file = os.path.join(sys.prefix, 'pyvenv.cfg')
    try:
        # Although PEP 405 does not specify, the built-in venv module always
        # writes with UTF-8. (pypa/pip#8717)
        with io.open(pyvenv_cfg_file, encoding='utf-8') as f:
            return f.read().splitlines()  # avoids trailing newlines
    except IOError:
        return None


def _no_global_under_venv():
    # type: () -> bool
    """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion

    PEP 405 specifies that when system site-packages are not supposed to be
    visible from a virtual environment, `pyvenv.cfg` must contain the following
    line:

        include-system-site-packages = false

    Additionally, log a warning if accessing the file fails.
    """
    cfg_lines = _get_pyvenv_cfg_lines()
    if cfg_lines is None:
        # We're not in a "sane" venv, so assume there is no system
        # site-packages access (since that's PEP 405's default state).
        logger.warning(
            "Could not access 'pyvenv.cfg' despite a virtual environment "
            "being active. Assuming global site-packages is not accessible "
            "in this environment."
        )
        return True

    for line in cfg_lines:
        match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line)
        if match is not None and match.group('value') == 'false':
            return True
    return False
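A hedged illustration (not part of the diff) of what the regex above accepts; the sample pyvenv.cfg content is made up.

sample_cfg_lines = [
    "home = /usr/bin",
    "include-system-site-packages = false",
    "version = 3.8.2",
]

for line in sample_cfg_lines:
    match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line)
    if match is not None:
        # Prints: include-system-site-packages -> false
        print("include-system-site-packages -> " + match.group("value"))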
def _no_global_under_regular_virtualenv():
    # type: () -> bool
    """Check if "no-global-site-packages.txt" exists beside site.py

    This mirrors logic in pypa/virtualenv for determining whether system
    site-packages are visible in the virtual environment.
    """
    site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
    no_global_site_packages_file = os.path.join(
        site_mod_dir, 'no-global-site-packages.txt',
    )
    return os.path.exists(no_global_site_packages_file)


def virtualenv_no_global():
    # type: () -> bool
    """Returns a boolean, whether running in venv with no system site-packages.
    """
    # PEP 405 compliance needs to be checked first since virtualenv >=20 would
    # return True for both checks, but is only able to use the PEP 405 config.
    if _running_under_venv():
        return _no_global_under_venv()

    if _running_under_regular_virtualenv():
        return _no_global_under_regular_virtualenv()

    return False
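A short sketch (assumption: run from inside the environment being inspected) of how a caller might combine these checks; the printed messages are illustrative only.

if not running_under_virtualenv():
    print("running against the global interpreter")
elif virtualenv_no_global():
    print("isolated virtual environment (system site-packages hidden)")
else:
    print("virtual environment with access to system site-packages")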