Mirror of https://github.com/mozilla/gecko-dev.git, synced 2024-11-23 04:41:11 +00:00.
Bug 1915242 - Upgrade setuptools to 74.0.0 r=mach-reviewers,ahal
Differential Revision: https://phabricator.services.mozilla.com/D220347
parent 99a97c0d47
commit d415539adf
17 third_party/python/poetry.lock (generated, vendored)
@@ -1289,18 +1289,23 @@ tornado = ["tornado (>=5)"]
 
 [[package]]
 name = "setuptools"
-version = "70.0.0"
+version = "74.0.0"
 description = "Easily download, build, install, upgrade, and uninstall Python packages"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"},
-    {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"},
+    {file = "setuptools-74.0.0-py3-none-any.whl", hash = "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f"},
+    {file = "setuptools-74.0.0.tar.gz", hash = "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e"},
 ]
 
 [package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
-testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"]
+core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
+type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"]
 
 [[package]]
 name = "six"
@@ -1596,4 +1601,4 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.8"
-content-hash = "28eb4b701c12ac0b21eb247e08fedb84268ecf8e4b45e411bc557b8c0fdce177"
+content-hash = "95747455ee8b0232a9d1a90e02637ebc67a12259b566bb6b8fb320895e2ae984"
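A minimal verification sketch, not part of this commit: the pin recorded above can be read back with the standard-library TOML parser (Python 3.11+), since poetry.lock stores one [[package]] table per dependency with "name" and "version" keys.

import tomllib

with open("third_party/python/poetry.lock", "rb") as fh:
    lock = tomllib.load(fh)

# Map package name to pinned version and look up setuptools.
pins = {pkg["name"]: pkg["version"] for pkg in lock["package"]}
print(pins["setuptools"])  # expected to print "74.0.0" after this change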
2 third_party/python/requirements.in (vendored)
@@ -49,7 +49,7 @@ requests-unixsocket==0.2.0
 responses==0.10.6
 rsa==4.9
 sentry-sdk==0.14.3
-setuptools==70.0.0
+setuptools==74.0.0
 six==1.16.0
 slugid==2.0.0
 taskcluster==44.2.2
6 third_party/python/requirements.txt (vendored)
@@ -538,9 +538,9 @@ rsa==4.9 ; python_version >= "3.8" and python_version < "4" \
 sentry-sdk==0.14.3 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:23808d571d2461a4ce3784ec12bbee5bdb8c026c143fe79d36cef8a6d653e71f \
     --hash=sha256:bb90a4e19c7233a580715fc986cc44be2c48fc10b31e71580a2037e1c94b6950
-setuptools==70.0.0 ; python_version >= "3.8" and python_version < "4.0" \
-    --hash=sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4 \
-    --hash=sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0
+setuptools==74.0.0 ; python_version >= "3.8" and python_version < "4.0" \
+    --hash=sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f \
+    --hash=sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e
 six==1.16.0 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
     --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
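A hedged sketch, not part of this commit, of what the --hash pins above enforce: an archive is only accepted if its sha256 digest matches one of the pinned values.

import hashlib

def matches_pin(archive_path: str, pinned_sha256: str) -> bool:
    # Hash the downloaded archive and compare it with a pin such as the
    # setuptools 74.0.0 digests listed above.
    with open(archive_path, "rb") as fh:
        digest = hashlib.sha256(fh.read()).hexdigest()
    return digest == pinned_sha256

# Hypothetical local file name, shown only for illustration:
# matches_pin("setuptools-74.0.0.tar.gz",
#             "a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e")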
@@ -1,6 +1,11 @@
 # don't import any costly modules
-import sys
 import os
+import sys
+
+report_url = (
+    "https://github.com/pypa/setuptools/issues/new?"
+    "template=distutils-deprecation.yml"
+)
 
 
 def warn_distutils_present():
@@ -23,7 +28,12 @@ def clear_distutils():
         return
     import warnings
 
-    warnings.warn("Setuptools is replacing distutils.")
+    warnings.warn(
+        "Setuptools is replacing distutils. Support for replacing "
+        "an already imported distutils is deprecated. In the future, "
+        "this condition will fail. "
+        f"Register concerns at {report_url}"
+    )
     mods = [
         name
         for name in sys.modules
@@ -38,6 +48,16 @@ def enabled():
     Allow selection of distutils by environment variable.
     """
     which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
+    if which == 'stdlib':
+        import warnings
+
+        warnings.warn(
+            "Reliance on distutils from stdlib is deprecated. Users "
+            "must rely on setuptools to provide the distutils module. "
+            "Avoid importing distutils or import setuptools first, "
+            "and avoid setting SETUPTOOLS_USE_DISTUTILS=stdlib. "
+            f"Register concerns at {report_url}"
+        )
     return which == 'local'
 
 
@@ -197,10 +217,10 @@ def add_shim():
 
 
 class shim:
-    def __enter__(self):
+    def __enter__(self) -> None:
         insert_shim()
 
-    def __exit__(self, exc, value, tb):
+    def __exit__(self, exc: object, value: object, tb: object) -> None:
         _remove_shim()
 
 
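A hedged illustration, not part of the patch: the new branch in enabled() above means that selecting the stdlib distutils via SETUPTOOLS_USE_DISTUTILS now emits a warning before setuptools falls back to it.

import os
import warnings

os.environ["SETUPTOOLS_USE_DISTUTILS"] = "stdlib"
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    import _distutils_hack  # top-level module shipped with setuptools
    _distutils_hack.enabled()  # returns False and warns when 'stdlib' is selected

print([str(w.message) for w in caught])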
1022 third_party/python/setuptools/pkg_resources/__init__.py (vendored)
File diff suppressed because it is too large.
@@ -1,36 +0,0 @@
-"""Read resources contained within a package."""
-
-from ._common import (
-    as_file,
-    files,
-    Package,
-)
-
-from ._legacy import (
-    contents,
-    open_binary,
-    read_binary,
-    open_text,
-    read_text,
-    is_resource,
-    path,
-    Resource,
-)
-
-from .abc import ResourceReader
-
-
-__all__ = [
-    'Package',
-    'Resource',
-    'ResourceReader',
-    'as_file',
-    'contents',
-    'files',
-    'is_resource',
-    'open_binary',
-    'open_text',
-    'path',
-    'read_binary',
-    'read_text',
-]
@@ -1,170 +0,0 @@
|
||||
from contextlib import suppress
|
||||
from io import TextIOWrapper
|
||||
|
||||
from . import abc
|
||||
|
||||
|
||||
class SpecLoaderAdapter:
|
||||
"""
|
||||
Adapt a package spec to adapt the underlying loader.
|
||||
"""
|
||||
|
||||
def __init__(self, spec, adapter=lambda spec: spec.loader):
|
||||
self.spec = spec
|
||||
self.loader = adapter(spec)
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.spec, name)
|
||||
|
||||
|
||||
class TraversableResourcesLoader:
|
||||
"""
|
||||
Adapt a loader to provide TraversableResources.
|
||||
"""
|
||||
|
||||
def __init__(self, spec):
|
||||
self.spec = spec
|
||||
|
||||
def get_resource_reader(self, name):
|
||||
return CompatibilityFiles(self.spec)._native()
|
||||
|
||||
|
||||
def _io_wrapper(file, mode='r', *args, **kwargs):
|
||||
if mode == 'r':
|
||||
return TextIOWrapper(file, *args, **kwargs)
|
||||
elif mode == 'rb':
|
||||
return file
|
||||
raise ValueError(
|
||||
"Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode)
|
||||
)
|
||||
|
||||
|
||||
class CompatibilityFiles:
|
||||
"""
|
||||
Adapter for an existing or non-existent resource reader
|
||||
to provide a compatibility .files().
|
||||
"""
|
||||
|
||||
class SpecPath(abc.Traversable):
|
||||
"""
|
||||
Path tied to a module spec.
|
||||
Can be read and exposes the resource reader children.
|
||||
"""
|
||||
|
||||
def __init__(self, spec, reader):
|
||||
self._spec = spec
|
||||
self._reader = reader
|
||||
|
||||
def iterdir(self):
|
||||
if not self._reader:
|
||||
return iter(())
|
||||
return iter(
|
||||
CompatibilityFiles.ChildPath(self._reader, path)
|
||||
for path in self._reader.contents()
|
||||
)
|
||||
|
||||
def is_file(self):
|
||||
return False
|
||||
|
||||
is_dir = is_file
|
||||
|
||||
def joinpath(self, other):
|
||||
if not self._reader:
|
||||
return CompatibilityFiles.OrphanPath(other)
|
||||
return CompatibilityFiles.ChildPath(self._reader, other)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._spec.name
|
||||
|
||||
def open(self, mode='r', *args, **kwargs):
|
||||
return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs)
|
||||
|
||||
class ChildPath(abc.Traversable):
|
||||
"""
|
||||
Path tied to a resource reader child.
|
||||
Can be read but doesn't expose any meaningful children.
|
||||
"""
|
||||
|
||||
def __init__(self, reader, name):
|
||||
self._reader = reader
|
||||
self._name = name
|
||||
|
||||
def iterdir(self):
|
||||
return iter(())
|
||||
|
||||
def is_file(self):
|
||||
return self._reader.is_resource(self.name)
|
||||
|
||||
def is_dir(self):
|
||||
return not self.is_file()
|
||||
|
||||
def joinpath(self, other):
|
||||
return CompatibilityFiles.OrphanPath(self.name, other)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._name
|
||||
|
||||
def open(self, mode='r', *args, **kwargs):
|
||||
return _io_wrapper(
|
||||
self._reader.open_resource(self.name), mode, *args, **kwargs
|
||||
)
|
||||
|
||||
class OrphanPath(abc.Traversable):
|
||||
"""
|
||||
Orphan path, not tied to a module spec or resource reader.
|
||||
Can't be read and doesn't expose any meaningful children.
|
||||
"""
|
||||
|
||||
def __init__(self, *path_parts):
|
||||
if len(path_parts) < 1:
|
||||
raise ValueError('Need at least one path part to construct a path')
|
||||
self._path = path_parts
|
||||
|
||||
def iterdir(self):
|
||||
return iter(())
|
||||
|
||||
def is_file(self):
|
||||
return False
|
||||
|
||||
is_dir = is_file
|
||||
|
||||
def joinpath(self, other):
|
||||
return CompatibilityFiles.OrphanPath(*self._path, other)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._path[-1]
|
||||
|
||||
def open(self, mode='r', *args, **kwargs):
|
||||
raise FileNotFoundError("Can't open orphan path")
|
||||
|
||||
def __init__(self, spec):
|
||||
self.spec = spec
|
||||
|
||||
@property
|
||||
def _reader(self):
|
||||
with suppress(AttributeError):
|
||||
return self.spec.loader.get_resource_reader(self.spec.name)
|
||||
|
||||
def _native(self):
|
||||
"""
|
||||
Return the native reader if it supports files().
|
||||
"""
|
||||
reader = self._reader
|
||||
return reader if hasattr(reader, 'files') else self
|
||||
|
||||
def __getattr__(self, attr):
|
||||
return getattr(self._reader, attr)
|
||||
|
||||
def files(self):
|
||||
return CompatibilityFiles.SpecPath(self.spec, self._reader)
|
||||
|
||||
|
||||
def wrap_spec(package):
|
||||
"""
|
||||
Construct a package spec with traversable compatibility
|
||||
on the spec/loader/reader.
|
||||
"""
|
||||
return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
|
@@ -1,207 +0,0 @@
|
||||
import os
|
||||
import pathlib
|
||||
import tempfile
|
||||
import functools
|
||||
import contextlib
|
||||
import types
|
||||
import importlib
|
||||
import inspect
|
||||
import warnings
|
||||
import itertools
|
||||
|
||||
from typing import Union, Optional, cast
|
||||
from .abc import ResourceReader, Traversable
|
||||
|
||||
from ._compat import wrap_spec
|
||||
|
||||
Package = Union[types.ModuleType, str]
|
||||
Anchor = Package
|
||||
|
||||
|
||||
def package_to_anchor(func):
|
||||
"""
|
||||
Replace 'package' parameter as 'anchor' and warn about the change.
|
||||
|
||||
Other errors should fall through.
|
||||
|
||||
>>> files('a', 'b')
|
||||
Traceback (most recent call last):
|
||||
TypeError: files() takes from 0 to 1 positional arguments but 2 were given
|
||||
"""
|
||||
undefined = object()
|
||||
|
||||
@functools.wraps(func)
|
||||
def wrapper(anchor=undefined, package=undefined):
|
||||
if package is not undefined:
|
||||
if anchor is not undefined:
|
||||
return func(anchor, package)
|
||||
warnings.warn(
|
||||
"First parameter to files is renamed to 'anchor'",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return func(package)
|
||||
elif anchor is undefined:
|
||||
return func()
|
||||
return func(anchor)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
@package_to_anchor
|
||||
def files(anchor: Optional[Anchor] = None) -> Traversable:
|
||||
"""
|
||||
Get a Traversable resource for an anchor.
|
||||
"""
|
||||
return from_package(resolve(anchor))
|
||||
|
||||
|
||||
def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]:
|
||||
"""
|
||||
Return the package's loader if it's a ResourceReader.
|
||||
"""
|
||||
# We can't use
|
||||
# a issubclass() check here because apparently abc.'s __subclasscheck__()
|
||||
# hook wants to create a weak reference to the object, but
|
||||
# zipimport.zipimporter does not support weak references, resulting in a
|
||||
# TypeError. That seems terrible.
|
||||
spec = package.__spec__
|
||||
reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore
|
||||
if reader is None:
|
||||
return None
|
||||
return reader(spec.name) # type: ignore
|
||||
|
||||
|
||||
@functools.singledispatch
|
||||
def resolve(cand: Optional[Anchor]) -> types.ModuleType:
|
||||
return cast(types.ModuleType, cand)
|
||||
|
||||
|
||||
@resolve.register
|
||||
def _(cand: str) -> types.ModuleType:
|
||||
return importlib.import_module(cand)
|
||||
|
||||
|
||||
@resolve.register
|
||||
def _(cand: None) -> types.ModuleType:
|
||||
return resolve(_infer_caller().f_globals['__name__'])
|
||||
|
||||
|
||||
def _infer_caller():
|
||||
"""
|
||||
Walk the stack and find the frame of the first caller not in this module.
|
||||
"""
|
||||
|
||||
def is_this_file(frame_info):
|
||||
return frame_info.filename == __file__
|
||||
|
||||
def is_wrapper(frame_info):
|
||||
return frame_info.function == 'wrapper'
|
||||
|
||||
not_this_file = itertools.filterfalse(is_this_file, inspect.stack())
|
||||
# also exclude 'wrapper' due to singledispatch in the call stack
|
||||
callers = itertools.filterfalse(is_wrapper, not_this_file)
|
||||
return next(callers).frame
|
||||
|
||||
|
||||
def from_package(package: types.ModuleType):
|
||||
"""
|
||||
Return a Traversable object for the given package.
|
||||
|
||||
"""
|
||||
spec = wrap_spec(package)
|
||||
reader = spec.loader.get_resource_reader(spec.name)
|
||||
return reader.files()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _tempfile(
|
||||
reader,
|
||||
suffix='',
|
||||
# gh-93353: Keep a reference to call os.remove() in late Python
|
||||
# finalization.
|
||||
*,
|
||||
_os_remove=os.remove,
|
||||
):
|
||||
# Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
|
||||
# blocks due to the need to close the temporary file to work on Windows
|
||||
# properly.
|
||||
fd, raw_path = tempfile.mkstemp(suffix=suffix)
|
||||
try:
|
||||
try:
|
||||
os.write(fd, reader())
|
||||
finally:
|
||||
os.close(fd)
|
||||
del reader
|
||||
yield pathlib.Path(raw_path)
|
||||
finally:
|
||||
try:
|
||||
_os_remove(raw_path)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
|
||||
def _temp_file(path):
|
||||
return _tempfile(path.read_bytes, suffix=path.name)
|
||||
|
||||
|
||||
def _is_present_dir(path: Traversable) -> bool:
|
||||
"""
|
||||
Some Traversables implement ``is_dir()`` to raise an
|
||||
exception (i.e. ``FileNotFoundError``) when the
|
||||
directory doesn't exist. This function wraps that call
|
||||
to always return a boolean and only return True
|
||||
if there's a dir and it exists.
|
||||
"""
|
||||
with contextlib.suppress(FileNotFoundError):
|
||||
return path.is_dir()
|
||||
return False
|
||||
|
||||
|
||||
@functools.singledispatch
|
||||
def as_file(path):
|
||||
"""
|
||||
Given a Traversable object, return that object as a
|
||||
path on the local file system in a context manager.
|
||||
"""
|
||||
return _temp_dir(path) if _is_present_dir(path) else _temp_file(path)
|
||||
|
||||
|
||||
@as_file.register(pathlib.Path)
|
||||
@contextlib.contextmanager
|
||||
def _(path):
|
||||
"""
|
||||
Degenerate behavior for pathlib.Path objects.
|
||||
"""
|
||||
yield path
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _temp_path(dir: tempfile.TemporaryDirectory):
|
||||
"""
|
||||
Wrap tempfile.TemporyDirectory to return a pathlib object.
|
||||
"""
|
||||
with dir as result:
|
||||
yield pathlib.Path(result)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _temp_dir(path):
|
||||
"""
|
||||
Given a traversable dir, recursively replicate the whole tree
|
||||
to the file system in a context manager.
|
||||
"""
|
||||
assert path.is_dir()
|
||||
with _temp_path(tempfile.TemporaryDirectory()) as temp_dir:
|
||||
yield _write_contents(temp_dir, path)
|
||||
|
||||
|
||||
def _write_contents(target, source):
|
||||
child = target.joinpath(source.name)
|
||||
if source.is_dir():
|
||||
child.mkdir()
|
||||
for item in source.iterdir():
|
||||
_write_contents(child, item)
|
||||
else:
|
||||
child.write_bytes(source.read_bytes())
|
||||
return child
|
@@ -1,108 +0,0 @@
|
||||
# flake8: noqa
|
||||
|
||||
import abc
|
||||
import os
|
||||
import sys
|
||||
import pathlib
|
||||
from contextlib import suppress
|
||||
from typing import Union
|
||||
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
from zipfile import Path as ZipPath # type: ignore
|
||||
else:
|
||||
from ..zipp import Path as ZipPath # type: ignore
|
||||
|
||||
|
||||
try:
|
||||
from typing import runtime_checkable # type: ignore
|
||||
except ImportError:
|
||||
|
||||
def runtime_checkable(cls): # type: ignore
|
||||
return cls
|
||||
|
||||
|
||||
try:
|
||||
from typing import Protocol # type: ignore
|
||||
except ImportError:
|
||||
Protocol = abc.ABC # type: ignore
|
||||
|
||||
|
||||
class TraversableResourcesLoader:
|
||||
"""
|
||||
Adapt loaders to provide TraversableResources and other
|
||||
compatibility.
|
||||
|
||||
Used primarily for Python 3.9 and earlier where the native
|
||||
loaders do not yet implement TraversableResources.
|
||||
"""
|
||||
|
||||
def __init__(self, spec):
|
||||
self.spec = spec
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return self.spec.origin
|
||||
|
||||
def get_resource_reader(self, name):
|
||||
from . import readers, _adapters
|
||||
|
||||
def _zip_reader(spec):
|
||||
with suppress(AttributeError):
|
||||
return readers.ZipReader(spec.loader, spec.name)
|
||||
|
||||
def _namespace_reader(spec):
|
||||
with suppress(AttributeError, ValueError):
|
||||
return readers.NamespaceReader(spec.submodule_search_locations)
|
||||
|
||||
def _available_reader(spec):
|
||||
with suppress(AttributeError):
|
||||
return spec.loader.get_resource_reader(spec.name)
|
||||
|
||||
def _native_reader(spec):
|
||||
reader = _available_reader(spec)
|
||||
return reader if hasattr(reader, 'files') else None
|
||||
|
||||
def _file_reader(spec):
|
||||
try:
|
||||
path = pathlib.Path(self.path)
|
||||
except TypeError:
|
||||
return None
|
||||
if path.exists():
|
||||
return readers.FileReader(self)
|
||||
|
||||
return (
|
||||
# native reader if it supplies 'files'
|
||||
_native_reader(self.spec)
|
||||
or
|
||||
# local ZipReader if a zip module
|
||||
_zip_reader(self.spec)
|
||||
or
|
||||
# local NamespaceReader if a namespace module
|
||||
_namespace_reader(self.spec)
|
||||
or
|
||||
# local FileReader
|
||||
_file_reader(self.spec)
|
||||
# fallback - adapt the spec ResourceReader to TraversableReader
|
||||
or _adapters.CompatibilityFiles(self.spec)
|
||||
)
|
||||
|
||||
|
||||
def wrap_spec(package):
|
||||
"""
|
||||
Construct a package spec with traversable compatibility
|
||||
on the spec/loader/reader.
|
||||
|
||||
Supersedes _adapters.wrap_spec to use TraversableResourcesLoader
|
||||
from above for older Python compatibility (<3.10).
|
||||
"""
|
||||
from . import _adapters
|
||||
|
||||
return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
|
||||
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
StrPath = Union[str, os.PathLike[str]]
|
||||
else:
|
||||
# PathLike is only subscriptable at runtime in 3.9+
|
||||
StrPath = Union[str, "os.PathLike[str]"]
|
@@ -1,35 +0,0 @@
-from itertools import filterfalse
-
-from typing import (
-    Callable,
-    Iterable,
-    Iterator,
-    Optional,
-    Set,
-    TypeVar,
-    Union,
-)
-
-# Type and type variable definitions
-_T = TypeVar('_T')
-_U = TypeVar('_U')
-
-
-def unique_everseen(
-    iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None
-) -> Iterator[_T]:
-    "List unique elements, preserving order. Remember all elements ever seen."
-    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
-    # unique_everseen('ABBCcAD', str.lower) --> A B C D
-    seen: Set[Union[_T, _U]] = set()
-    seen_add = seen.add
-    if key is None:
-        for element in filterfalse(seen.__contains__, iterable):
-            seen_add(element)
-            yield element
-    else:
-        for element in iterable:
-            k = key(element)
-            if k not in seen:
-                seen_add(k)
-                yield element
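A short usage sketch, not part of this commit: the same recipe ships publicly as more_itertools.unique_everseen, which the vendored helper above mirrored.

from more_itertools import unique_everseen

# Duplicates are dropped while first-seen order is preserved, optionally keyed.
print(list(unique_everseen('AAAABBBCCDAABBB')))         # ['A', 'B', 'C', 'D']
print(list(unique_everseen('ABBCcAD', key=str.lower)))  # ['A', 'B', 'C', 'D']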
@@ -1,120 +0,0 @@
|
||||
import functools
|
||||
import os
|
||||
import pathlib
|
||||
import types
|
||||
import warnings
|
||||
|
||||
from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any
|
||||
|
||||
from . import _common
|
||||
|
||||
Package = Union[types.ModuleType, str]
|
||||
Resource = str
|
||||
|
||||
|
||||
def deprecated(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
warnings.warn(
|
||||
f"{func.__name__} is deprecated. Use files() instead. "
|
||||
"Refer to https://importlib-resources.readthedocs.io"
|
||||
"/en/latest/using.html#migrating-from-legacy for migration advice.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def normalize_path(path: Any) -> str:
|
||||
"""Normalize a path by ensuring it is a string.
|
||||
|
||||
If the resulting string contains path separators, an exception is raised.
|
||||
"""
|
||||
str_path = str(path)
|
||||
parent, file_name = os.path.split(str_path)
|
||||
if parent:
|
||||
raise ValueError(f'{path!r} must be only a file name')
|
||||
return file_name
|
||||
|
||||
|
||||
@deprecated
|
||||
def open_binary(package: Package, resource: Resource) -> BinaryIO:
|
||||
"""Return a file-like object opened for binary reading of the resource."""
|
||||
return (_common.files(package) / normalize_path(resource)).open('rb')
|
||||
|
||||
|
||||
@deprecated
|
||||
def read_binary(package: Package, resource: Resource) -> bytes:
|
||||
"""Return the binary contents of the resource."""
|
||||
return (_common.files(package) / normalize_path(resource)).read_bytes()
|
||||
|
||||
|
||||
@deprecated
|
||||
def open_text(
|
||||
package: Package,
|
||||
resource: Resource,
|
||||
encoding: str = 'utf-8',
|
||||
errors: str = 'strict',
|
||||
) -> TextIO:
|
||||
"""Return a file-like object opened for text reading of the resource."""
|
||||
return (_common.files(package) / normalize_path(resource)).open(
|
||||
'r', encoding=encoding, errors=errors
|
||||
)
|
||||
|
||||
|
||||
@deprecated
|
||||
def read_text(
|
||||
package: Package,
|
||||
resource: Resource,
|
||||
encoding: str = 'utf-8',
|
||||
errors: str = 'strict',
|
||||
) -> str:
|
||||
"""Return the decoded string of the resource.
|
||||
|
||||
The decoding-related arguments have the same semantics as those of
|
||||
bytes.decode().
|
||||
"""
|
||||
with open_text(package, resource, encoding, errors) as fp:
|
||||
return fp.read()
|
||||
|
||||
|
||||
@deprecated
|
||||
def contents(package: Package) -> Iterable[str]:
|
||||
"""Return an iterable of entries in `package`.
|
||||
|
||||
Note that not all entries are resources. Specifically, directories are
|
||||
not considered resources. Use `is_resource()` on each entry returned here
|
||||
to check if it is a resource or not.
|
||||
"""
|
||||
return [path.name for path in _common.files(package).iterdir()]
|
||||
|
||||
|
||||
@deprecated
|
||||
def is_resource(package: Package, name: str) -> bool:
|
||||
"""True if `name` is a resource inside `package`.
|
||||
|
||||
Directories are *not* resources.
|
||||
"""
|
||||
resource = normalize_path(name)
|
||||
return any(
|
||||
traversable.name == resource and traversable.is_file()
|
||||
for traversable in _common.files(package).iterdir()
|
||||
)
|
||||
|
||||
|
||||
@deprecated
|
||||
def path(
|
||||
package: Package,
|
||||
resource: Resource,
|
||||
) -> ContextManager[pathlib.Path]:
|
||||
"""A context manager providing a file path object to the resource.
|
||||
|
||||
If the resource does not already exist on its own on the file system,
|
||||
a temporary file will be created. If the file was created, the file
|
||||
will be deleted upon exiting the context manager (no exception is
|
||||
raised if the file was deleted prior to the context manager
|
||||
exiting).
|
||||
"""
|
||||
return _common.as_file(_common.files(package) / normalize_path(resource))
|
@@ -1,170 +0,0 @@
|
||||
import abc
|
||||
import io
|
||||
import itertools
|
||||
import pathlib
|
||||
from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional
|
||||
|
||||
from ._compat import runtime_checkable, Protocol, StrPath
|
||||
|
||||
|
||||
__all__ = ["ResourceReader", "Traversable", "TraversableResources"]
|
||||
|
||||
|
||||
class ResourceReader(metaclass=abc.ABCMeta):
|
||||
"""Abstract base class for loaders to provide resource reading support."""
|
||||
|
||||
@abc.abstractmethod
|
||||
def open_resource(self, resource: Text) -> BinaryIO:
|
||||
"""Return an opened, file-like object for binary reading.
|
||||
|
||||
The 'resource' argument is expected to represent only a file name.
|
||||
If the resource cannot be found, FileNotFoundError is raised.
|
||||
"""
|
||||
# This deliberately raises FileNotFoundError instead of
|
||||
# NotImplementedError so that if this method is accidentally called,
|
||||
# it'll still do the right thing.
|
||||
raise FileNotFoundError
|
||||
|
||||
@abc.abstractmethod
|
||||
def resource_path(self, resource: Text) -> Text:
|
||||
"""Return the file system path to the specified resource.
|
||||
|
||||
The 'resource' argument is expected to represent only a file name.
|
||||
If the resource does not exist on the file system, raise
|
||||
FileNotFoundError.
|
||||
"""
|
||||
# This deliberately raises FileNotFoundError instead of
|
||||
# NotImplementedError so that if this method is accidentally called,
|
||||
# it'll still do the right thing.
|
||||
raise FileNotFoundError
|
||||
|
||||
@abc.abstractmethod
|
||||
def is_resource(self, path: Text) -> bool:
|
||||
"""Return True if the named 'path' is a resource.
|
||||
|
||||
Files are resources, directories are not.
|
||||
"""
|
||||
raise FileNotFoundError
|
||||
|
||||
@abc.abstractmethod
|
||||
def contents(self) -> Iterable[str]:
|
||||
"""Return an iterable of entries in `package`."""
|
||||
raise FileNotFoundError
|
||||
|
||||
|
||||
class TraversalError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
class Traversable(Protocol):
|
||||
"""
|
||||
An object with a subset of pathlib.Path methods suitable for
|
||||
traversing directories and opening files.
|
||||
|
||||
Any exceptions that occur when accessing the backing resource
|
||||
may propagate unaltered.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def iterdir(self) -> Iterator["Traversable"]:
|
||||
"""
|
||||
Yield Traversable objects in self
|
||||
"""
|
||||
|
||||
def read_bytes(self) -> bytes:
|
||||
"""
|
||||
Read contents of self as bytes
|
||||
"""
|
||||
with self.open('rb') as strm:
|
||||
return strm.read()
|
||||
|
||||
def read_text(self, encoding: Optional[str] = None) -> str:
|
||||
"""
|
||||
Read contents of self as text
|
||||
"""
|
||||
with self.open(encoding=encoding) as strm:
|
||||
return strm.read()
|
||||
|
||||
@abc.abstractmethod
|
||||
def is_dir(self) -> bool:
|
||||
"""
|
||||
Return True if self is a directory
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def is_file(self) -> bool:
|
||||
"""
|
||||
Return True if self is a file
|
||||
"""
|
||||
|
||||
def joinpath(self, *descendants: StrPath) -> "Traversable":
|
||||
"""
|
||||
Return Traversable resolved with any descendants applied.
|
||||
|
||||
Each descendant should be a path segment relative to self
|
||||
and each may contain multiple levels separated by
|
||||
``posixpath.sep`` (``/``).
|
||||
"""
|
||||
if not descendants:
|
||||
return self
|
||||
names = itertools.chain.from_iterable(
|
||||
path.parts for path in map(pathlib.PurePosixPath, descendants)
|
||||
)
|
||||
target = next(names)
|
||||
matches = (
|
||||
traversable for traversable in self.iterdir() if traversable.name == target
|
||||
)
|
||||
try:
|
||||
match = next(matches)
|
||||
except StopIteration:
|
||||
raise TraversalError(
|
||||
"Target not found during traversal.", target, list(names)
|
||||
)
|
||||
return match.joinpath(*names)
|
||||
|
||||
def __truediv__(self, child: StrPath) -> "Traversable":
|
||||
"""
|
||||
Return Traversable child in self
|
||||
"""
|
||||
return self.joinpath(child)
|
||||
|
||||
@abc.abstractmethod
|
||||
def open(self, mode='r', *args, **kwargs):
|
||||
"""
|
||||
mode may be 'r' or 'rb' to open as text or binary. Return a handle
|
||||
suitable for reading (same as pathlib.Path.open).
|
||||
|
||||
When opening as text, accepts encoding parameters such as those
|
||||
accepted by io.TextIOWrapper.
|
||||
"""
|
||||
|
||||
@property
|
||||
@abc.abstractmethod
|
||||
def name(self) -> str:
|
||||
"""
|
||||
The base name of this object without any parent references.
|
||||
"""
|
||||
|
||||
|
||||
class TraversableResources(ResourceReader):
|
||||
"""
|
||||
The required interface for providing traversable
|
||||
resources.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def files(self) -> "Traversable":
|
||||
"""Return a Traversable object for the loaded package."""
|
||||
|
||||
def open_resource(self, resource: StrPath) -> io.BufferedReader:
|
||||
return self.files().joinpath(resource).open('rb')
|
||||
|
||||
def resource_path(self, resource: Any) -> NoReturn:
|
||||
raise FileNotFoundError(resource)
|
||||
|
||||
def is_resource(self, path: StrPath) -> bool:
|
||||
return self.files().joinpath(path).is_file()
|
||||
|
||||
def contents(self) -> Iterator[str]:
|
||||
return (item.name for item in self.files().iterdir())
|
@@ -1,120 +0,0 @@
|
||||
import collections
|
||||
import pathlib
|
||||
import operator
|
||||
|
||||
from . import abc
|
||||
|
||||
from ._itertools import unique_everseen
|
||||
from ._compat import ZipPath
|
||||
|
||||
|
||||
def remove_duplicates(items):
|
||||
return iter(collections.OrderedDict.fromkeys(items))
|
||||
|
||||
|
||||
class FileReader(abc.TraversableResources):
|
||||
def __init__(self, loader):
|
||||
self.path = pathlib.Path(loader.path).parent
|
||||
|
||||
def resource_path(self, resource):
|
||||
"""
|
||||
Return the file system path to prevent
|
||||
`resources.path()` from creating a temporary
|
||||
copy.
|
||||
"""
|
||||
return str(self.path.joinpath(resource))
|
||||
|
||||
def files(self):
|
||||
return self.path
|
||||
|
||||
|
||||
class ZipReader(abc.TraversableResources):
|
||||
def __init__(self, loader, module):
|
||||
_, _, name = module.rpartition('.')
|
||||
self.prefix = loader.prefix.replace('\\', '/') + name + '/'
|
||||
self.archive = loader.archive
|
||||
|
||||
def open_resource(self, resource):
|
||||
try:
|
||||
return super().open_resource(resource)
|
||||
except KeyError as exc:
|
||||
raise FileNotFoundError(exc.args[0])
|
||||
|
||||
def is_resource(self, path):
|
||||
# workaround for `zipfile.Path.is_file` returning true
|
||||
# for non-existent paths.
|
||||
target = self.files().joinpath(path)
|
||||
return target.is_file() and target.exists()
|
||||
|
||||
def files(self):
|
||||
return ZipPath(self.archive, self.prefix)
|
||||
|
||||
|
||||
class MultiplexedPath(abc.Traversable):
|
||||
"""
|
||||
Given a series of Traversable objects, implement a merged
|
||||
version of the interface across all objects. Useful for
|
||||
namespace packages which may be multihomed at a single
|
||||
name.
|
||||
"""
|
||||
|
||||
def __init__(self, *paths):
|
||||
self._paths = list(map(pathlib.Path, remove_duplicates(paths)))
|
||||
if not self._paths:
|
||||
message = 'MultiplexedPath must contain at least one path'
|
||||
raise FileNotFoundError(message)
|
||||
if not all(path.is_dir() for path in self._paths):
|
||||
raise NotADirectoryError('MultiplexedPath only supports directories')
|
||||
|
||||
def iterdir(self):
|
||||
files = (file for path in self._paths for file in path.iterdir())
|
||||
return unique_everseen(files, key=operator.attrgetter('name'))
|
||||
|
||||
def read_bytes(self):
|
||||
raise FileNotFoundError(f'{self} is not a file')
|
||||
|
||||
def read_text(self, *args, **kwargs):
|
||||
raise FileNotFoundError(f'{self} is not a file')
|
||||
|
||||
def is_dir(self):
|
||||
return True
|
||||
|
||||
def is_file(self):
|
||||
return False
|
||||
|
||||
def joinpath(self, *descendants):
|
||||
try:
|
||||
return super().joinpath(*descendants)
|
||||
except abc.TraversalError:
|
||||
# One of the paths did not resolve (a directory does not exist).
|
||||
# Just return something that will not exist.
|
||||
return self._paths[0].joinpath(*descendants)
|
||||
|
||||
def open(self, *args, **kwargs):
|
||||
raise FileNotFoundError(f'{self} is not a file')
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._paths[0].name
|
||||
|
||||
def __repr__(self):
|
||||
paths = ', '.join(f"'{path}'" for path in self._paths)
|
||||
return f'MultiplexedPath({paths})'
|
||||
|
||||
|
||||
class NamespaceReader(abc.TraversableResources):
|
||||
def __init__(self, namespace_path):
|
||||
if 'NamespacePath' not in str(namespace_path):
|
||||
raise ValueError('Invalid path')
|
||||
self.path = MultiplexedPath(*list(namespace_path))
|
||||
|
||||
def resource_path(self, resource):
|
||||
"""
|
||||
Return the file system path to prevent
|
||||
`resources.path()` from creating a temporary
|
||||
copy.
|
||||
"""
|
||||
return str(self.path.joinpath(resource))
|
||||
|
||||
def files(self):
|
||||
return self.path
|
@@ -1,106 +0,0 @@
|
||||
"""
|
||||
Interface adapters for low-level readers.
|
||||
"""
|
||||
|
||||
import abc
|
||||
import io
|
||||
import itertools
|
||||
from typing import BinaryIO, List
|
||||
|
||||
from .abc import Traversable, TraversableResources
|
||||
|
||||
|
||||
class SimpleReader(abc.ABC):
|
||||
"""
|
||||
The minimum, low-level interface required from a resource
|
||||
provider.
|
||||
"""
|
||||
|
||||
@property
|
||||
@abc.abstractmethod
|
||||
def package(self) -> str:
|
||||
"""
|
||||
The name of the package for which this reader loads resources.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def children(self) -> List['SimpleReader']:
|
||||
"""
|
||||
Obtain an iterable of SimpleReader for available
|
||||
child containers (e.g. directories).
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def resources(self) -> List[str]:
|
||||
"""
|
||||
Obtain available named resources for this virtual package.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def open_binary(self, resource: str) -> BinaryIO:
|
||||
"""
|
||||
Obtain a File-like for a named resource.
|
||||
"""
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.package.split('.')[-1]
|
||||
|
||||
|
||||
class ResourceContainer(Traversable):
|
||||
"""
|
||||
Traversable container for a package's resources via its reader.
|
||||
"""
|
||||
|
||||
def __init__(self, reader: SimpleReader):
|
||||
self.reader = reader
|
||||
|
||||
def is_dir(self):
|
||||
return True
|
||||
|
||||
def is_file(self):
|
||||
return False
|
||||
|
||||
def iterdir(self):
|
||||
files = (ResourceHandle(self, name) for name in self.reader.resources)
|
||||
dirs = map(ResourceContainer, self.reader.children())
|
||||
return itertools.chain(files, dirs)
|
||||
|
||||
def open(self, *args, **kwargs):
|
||||
raise IsADirectoryError()
|
||||
|
||||
|
||||
class ResourceHandle(Traversable):
|
||||
"""
|
||||
Handle to a named resource in a ResourceReader.
|
||||
"""
|
||||
|
||||
def __init__(self, parent: ResourceContainer, name: str):
|
||||
self.parent = parent
|
||||
self.name = name # type: ignore
|
||||
|
||||
def is_file(self):
|
||||
return True
|
||||
|
||||
def is_dir(self):
|
||||
return False
|
||||
|
||||
def open(self, mode='r', *args, **kwargs):
|
||||
stream = self.parent.reader.open_binary(self.name)
|
||||
if 'b' not in mode:
|
||||
stream = io.TextIOWrapper(*args, **kwargs)
|
||||
return stream
|
||||
|
||||
def joinpath(self, name):
|
||||
raise RuntimeError("Cannot traverse into a resource")
|
||||
|
||||
|
||||
class TraversableReader(TraversableResources, SimpleReader):
|
||||
"""
|
||||
A TraversableResources based on SimpleReader. Resource providers
|
||||
may derive from this class to provide the TraversableResources
|
||||
interface by supplying the SimpleReader interface.
|
||||
"""
|
||||
|
||||
def files(self):
|
||||
return ResourceContainer(self)
|
@@ -1,361 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import functools
|
||||
import operator
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import urllib.request
|
||||
import warnings
|
||||
from typing import Iterator
|
||||
|
||||
|
||||
if sys.version_info < (3, 12):
|
||||
from pkg_resources.extern.backports import tarfile
|
||||
else:
|
||||
import tarfile
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def pushd(dir: str | os.PathLike) -> Iterator[str | os.PathLike]:
|
||||
"""
|
||||
>>> tmp_path = getfixture('tmp_path')
|
||||
>>> with pushd(tmp_path):
|
||||
... assert os.getcwd() == os.fspath(tmp_path)
|
||||
>>> assert os.getcwd() != os.fspath(tmp_path)
|
||||
"""
|
||||
|
||||
orig = os.getcwd()
|
||||
os.chdir(dir)
|
||||
try:
|
||||
yield dir
|
||||
finally:
|
||||
os.chdir(orig)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def tarball(
|
||||
url, target_dir: str | os.PathLike | None = None
|
||||
) -> Iterator[str | os.PathLike]:
|
||||
"""
|
||||
Get a tarball, extract it, yield, then clean up.
|
||||
|
||||
>>> import urllib.request
|
||||
>>> url = getfixture('tarfile_served')
|
||||
>>> target = getfixture('tmp_path') / 'out'
|
||||
>>> tb = tarball(url, target_dir=target)
|
||||
>>> import pathlib
|
||||
>>> with tb as extracted:
|
||||
... contents = pathlib.Path(extracted, 'contents.txt').read_text(encoding='utf-8')
|
||||
>>> assert not os.path.exists(extracted)
|
||||
"""
|
||||
if target_dir is None:
|
||||
target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '')
|
||||
# In the tar command, use --strip-components=1 to strip the first path and
|
||||
# then
|
||||
# use -C to cause the files to be extracted to {target_dir}. This ensures
|
||||
# that we always know where the files were extracted.
|
||||
os.mkdir(target_dir)
|
||||
try:
|
||||
req = urllib.request.urlopen(url)
|
||||
with tarfile.open(fileobj=req, mode='r|*') as tf:
|
||||
tf.extractall(path=target_dir, filter=strip_first_component)
|
||||
yield target_dir
|
||||
finally:
|
||||
shutil.rmtree(target_dir)
|
||||
|
||||
|
||||
def strip_first_component(
|
||||
member: tarfile.TarInfo,
|
||||
path,
|
||||
) -> tarfile.TarInfo:
|
||||
_, member.name = member.name.split('/', 1)
|
||||
return member
|
||||
|
||||
|
||||
def _compose(*cmgrs):
|
||||
"""
|
||||
Compose any number of dependent context managers into a single one.
|
||||
|
||||
The last, innermost context manager may take arbitrary arguments, but
|
||||
each successive context manager should accept the result from the
|
||||
previous as a single parameter.
|
||||
|
||||
Like :func:`jaraco.functools.compose`, behavior works from right to
|
||||
left, so the context manager should be indicated from outermost to
|
||||
innermost.
|
||||
|
||||
Example, to create a context manager to change to a temporary
|
||||
directory:
|
||||
|
||||
>>> temp_dir_as_cwd = _compose(pushd, temp_dir)
|
||||
>>> with temp_dir_as_cwd() as dir:
|
||||
... assert os.path.samefile(os.getcwd(), dir)
|
||||
"""
|
||||
|
||||
def compose_two(inner, outer):
|
||||
def composed(*args, **kwargs):
|
||||
with inner(*args, **kwargs) as saved, outer(saved) as res:
|
||||
yield res
|
||||
|
||||
return contextlib.contextmanager(composed)
|
||||
|
||||
return functools.reduce(compose_two, reversed(cmgrs))
|
||||
|
||||
|
||||
tarball_cwd = _compose(pushd, tarball)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def tarball_context(*args, **kwargs):
|
||||
warnings.warn(
|
||||
"tarball_context is deprecated. Use tarball or tarball_cwd instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
pushd_ctx = kwargs.pop('pushd', pushd)
|
||||
with tarball(*args, **kwargs) as tball, pushd_ctx(tball) as dir:
|
||||
yield dir
|
||||
|
||||
|
||||
def infer_compression(url):
|
||||
"""
|
||||
Given a URL or filename, infer the compression code for tar.
|
||||
|
||||
>>> infer_compression('http://foo/bar.tar.gz')
|
||||
'z'
|
||||
>>> infer_compression('http://foo/bar.tgz')
|
||||
'z'
|
||||
>>> infer_compression('file.bz')
|
||||
'j'
|
||||
>>> infer_compression('file.xz')
|
||||
'J'
|
||||
"""
|
||||
warnings.warn(
|
||||
"infer_compression is deprecated with no replacement",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
# cheat and just assume it's the last two characters
|
||||
compression_indicator = url[-2:]
|
||||
mapping = dict(gz='z', bz='j', xz='J')
|
||||
# Assume 'z' (gzip) if no match
|
||||
return mapping.get(compression_indicator, 'z')
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def temp_dir(remover=shutil.rmtree):
|
||||
"""
|
||||
Create a temporary directory context. Pass a custom remover
|
||||
to override the removal behavior.
|
||||
|
||||
>>> import pathlib
|
||||
>>> with temp_dir() as the_dir:
|
||||
... assert os.path.isdir(the_dir)
|
||||
... _ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents', encoding='utf-8')
|
||||
>>> assert not os.path.exists(the_dir)
|
||||
"""
|
||||
temp_dir = tempfile.mkdtemp()
|
||||
try:
|
||||
yield temp_dir
|
||||
finally:
|
||||
remover(temp_dir)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir):
|
||||
"""
|
||||
Check out the repo indicated by url.
|
||||
|
||||
If dest_ctx is supplied, it should be a context manager
|
||||
to yield the target directory for the check out.
|
||||
"""
|
||||
exe = 'git' if 'git' in url else 'hg'
|
||||
with dest_ctx() as repo_dir:
|
||||
cmd = [exe, 'clone', url, repo_dir]
|
||||
if branch:
|
||||
cmd.extend(['--branch', branch])
|
||||
devnull = open(os.path.devnull, 'w')
|
||||
stdout = devnull if quiet else None
|
||||
subprocess.check_call(cmd, stdout=stdout)
|
||||
yield repo_dir
|
||||
|
||||
|
||||
def null():
|
||||
"""
|
||||
A null context suitable to stand in for a meaningful context.
|
||||
|
||||
>>> with null() as value:
|
||||
... assert value is None
|
||||
|
||||
This context is most useful when dealing with two or more code
|
||||
branches but only some need a context. Wrap the others in a null
|
||||
context to provide symmetry across all options.
|
||||
"""
|
||||
warnings.warn(
|
||||
"null is deprecated. Use contextlib.nullcontext",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return contextlib.nullcontext()
|
||||
|
||||
|
||||
class ExceptionTrap:
|
||||
"""
|
||||
A context manager that will catch certain exceptions and provide an
|
||||
indication they occurred.
|
||||
|
||||
>>> with ExceptionTrap() as trap:
|
||||
... raise Exception()
|
||||
>>> bool(trap)
|
||||
True
|
||||
|
||||
>>> with ExceptionTrap() as trap:
|
||||
... pass
|
||||
>>> bool(trap)
|
||||
False
|
||||
|
||||
>>> with ExceptionTrap(ValueError) as trap:
|
||||
... raise ValueError("1 + 1 is not 3")
|
||||
>>> bool(trap)
|
||||
True
|
||||
>>> trap.value
|
||||
ValueError('1 + 1 is not 3')
|
||||
>>> trap.tb
|
||||
<traceback object at ...>
|
||||
|
||||
>>> with ExceptionTrap(ValueError) as trap:
|
||||
... raise Exception()
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
Exception
|
||||
|
||||
>>> bool(trap)
|
||||
False
|
||||
"""
|
||||
|
||||
exc_info = None, None, None
|
||||
|
||||
def __init__(self, exceptions=(Exception,)):
|
||||
self.exceptions = exceptions
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
@property
|
||||
def type(self):
|
||||
return self.exc_info[0]
|
||||
|
||||
@property
|
||||
def value(self):
|
||||
return self.exc_info[1]
|
||||
|
||||
@property
|
||||
def tb(self):
|
||||
return self.exc_info[2]
|
||||
|
||||
def __exit__(self, *exc_info):
|
||||
type = exc_info[0]
|
||||
matches = type and issubclass(type, self.exceptions)
|
||||
if matches:
|
||||
self.exc_info = exc_info
|
||||
return matches
|
||||
|
||||
def __bool__(self):
|
||||
return bool(self.type)
|
||||
|
||||
def raises(self, func, *, _test=bool):
|
||||
"""
|
||||
Wrap func and replace the result with the truth
|
||||
value of the trap (True if an exception occurred).
|
||||
|
||||
First, give the decorator an alias to support Python 3.8
|
||||
Syntax.
|
||||
|
||||
>>> raises = ExceptionTrap(ValueError).raises
|
||||
|
||||
Now decorate a function that always fails.
|
||||
|
||||
>>> @raises
|
||||
... def fail():
|
||||
... raise ValueError('failed')
|
||||
>>> fail()
|
||||
True
|
||||
"""
|
||||
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
with ExceptionTrap(self.exceptions) as trap:
|
||||
func(*args, **kwargs)
|
||||
return _test(trap)
|
||||
|
||||
return wrapper
|
||||
|
||||
def passes(self, func):
|
||||
"""
|
||||
Wrap func and replace the result with the truth
|
||||
value of the trap (True if no exception).
|
||||
|
||||
First, give the decorator an alias to support Python 3.8
|
||||
Syntax.
|
||||
|
||||
>>> passes = ExceptionTrap(ValueError).passes
|
||||
|
||||
Now decorate a function that always fails.
|
||||
|
||||
>>> @passes
|
||||
... def fail():
|
||||
... raise ValueError('failed')
|
||||
|
||||
>>> fail()
|
||||
False
|
||||
"""
|
||||
return self.raises(func, _test=operator.not_)
|
||||
|
||||
|
||||
class suppress(contextlib.suppress, contextlib.ContextDecorator):
|
||||
"""
|
||||
A version of contextlib.suppress with decorator support.
|
||||
|
||||
>>> @suppress(KeyError)
|
||||
... def key_error():
|
||||
... {}['']
|
||||
>>> key_error()
|
||||
"""
|
||||
|
||||
|
||||
class on_interrupt(contextlib.ContextDecorator):
|
||||
"""
|
||||
Replace a KeyboardInterrupt with SystemExit(1)
|
||||
|
||||
>>> def do_interrupt():
|
||||
... raise KeyboardInterrupt()
|
||||
>>> on_interrupt('error')(do_interrupt)()
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
SystemExit: 1
|
||||
>>> on_interrupt('error', code=255)(do_interrupt)()
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
SystemExit: 255
|
||||
>>> on_interrupt('suppress')(do_interrupt)()
|
||||
>>> with __import__('pytest').raises(KeyboardInterrupt):
|
||||
... on_interrupt('ignore')(do_interrupt)()
|
||||
"""
|
||||
|
||||
def __init__(self, action='error', /, code=1):
|
||||
self.action = action
|
||||
self.code = code
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exctype, excinst, exctb):
|
||||
if exctype is not KeyboardInterrupt or self.action == 'ignore':
|
||||
return
|
||||
elif self.action == 'error':
|
||||
raise SystemExit(self.code) from excinst
|
||||
return self.action == 'suppress'
|
@@ -1,633 +0,0 @@
|
||||
import collections.abc
|
||||
import functools
|
||||
import inspect
|
||||
import itertools
|
||||
import operator
|
||||
import time
|
||||
import types
|
||||
import warnings
|
||||
|
||||
import pkg_resources.extern.more_itertools
|
||||
|
||||
|
||||
def compose(*funcs):
|
||||
"""
|
||||
Compose any number of unary functions into a single unary function.
|
||||
|
||||
>>> import textwrap
|
||||
>>> expected = str.strip(textwrap.dedent(compose.__doc__))
|
||||
>>> strip_and_dedent = compose(str.strip, textwrap.dedent)
|
||||
>>> strip_and_dedent(compose.__doc__) == expected
|
||||
True
|
||||
|
||||
Compose also allows the innermost function to take arbitrary arguments.
|
||||
|
||||
>>> round_three = lambda x: round(x, ndigits=3)
|
||||
>>> f = compose(round_three, int.__truediv__)
|
||||
>>> [f(3*x, x+1) for x in range(1,10)]
|
||||
[1.5, 2.0, 2.25, 2.4, 2.5, 2.571, 2.625, 2.667, 2.7]
|
||||
"""
|
||||
|
||||
def compose_two(f1, f2):
|
||||
return lambda *args, **kwargs: f1(f2(*args, **kwargs))
|
||||
|
||||
return functools.reduce(compose_two, funcs)
|
||||
|
||||
|
||||
def once(func):
|
||||
"""
|
||||
Decorate func so it's only ever called the first time.
|
||||
|
||||
This decorator can ensure that an expensive or non-idempotent function
|
||||
will not be expensive on subsequent calls and is idempotent.
|
||||
|
||||
>>> add_three = once(lambda a: a+3)
|
||||
>>> add_three(3)
|
||||
6
|
||||
>>> add_three(9)
|
||||
6
|
||||
>>> add_three('12')
|
||||
6
|
||||
|
||||
To reset the stored value, simply clear the property ``saved_result``.
|
||||
|
||||
>>> del add_three.saved_result
|
||||
>>> add_three(9)
|
||||
12
|
||||
>>> add_three(8)
|
||||
12
|
||||
|
||||
Or invoke 'reset()' on it.
|
||||
|
||||
>>> add_three.reset()
|
||||
>>> add_three(-3)
|
||||
0
|
||||
>>> add_three(0)
|
||||
0
|
||||
"""
|
||||
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
if not hasattr(wrapper, 'saved_result'):
|
||||
wrapper.saved_result = func(*args, **kwargs)
|
||||
return wrapper.saved_result
|
||||
|
||||
wrapper.reset = lambda: vars(wrapper).__delitem__('saved_result')
|
||||
return wrapper
|
||||
|
||||
|
||||
def method_cache(method, cache_wrapper=functools.lru_cache()):
|
||||
"""
|
||||
Wrap lru_cache to support storing the cache data in the object instances.
|
||||
|
||||
Abstracts the common paradigm where the method explicitly saves an
|
||||
underscore-prefixed protected property on first call and returns that
|
||||
subsequently.
|
||||
|
||||
>>> class MyClass:
|
||||
... calls = 0
|
||||
...
|
||||
... @method_cache
|
||||
... def method(self, value):
|
||||
... self.calls += 1
|
||||
... return value
|
||||
|
||||
>>> a = MyClass()
|
||||
>>> a.method(3)
|
||||
3
|
||||
>>> for x in range(75):
|
||||
... res = a.method(x)
|
||||
>>> a.calls
|
||||
75
|
||||
|
||||
Note that the apparent behavior will be exactly like that of lru_cache
|
||||
except that the cache is stored on each instance, so values in one
|
||||
instance will not flush values from another, and when an instance is
|
||||
deleted, so are the cached values for that instance.
|
||||
|
||||
>>> b = MyClass()
|
||||
>>> for x in range(35):
|
||||
... res = b.method(x)
|
||||
>>> b.calls
|
||||
35
|
||||
>>> a.method(0)
|
||||
0
|
||||
>>> a.calls
|
||||
75
|
||||
|
||||
Note that if method had been decorated with ``functools.lru_cache()``,
|
||||
a.calls would have been 76 (due to the cached value of 0 having been
|
||||
flushed by the 'b' instance).
|
||||
|
||||
Clear the cache with ``.cache_clear()``
|
||||
|
||||
>>> a.method.cache_clear()
|
||||
|
||||
Same for a method that hasn't yet been called.
|
||||
|
||||
>>> c = MyClass()
|
||||
>>> c.method.cache_clear()
|
||||
|
||||
Another cache wrapper may be supplied:
|
||||
|
||||
>>> cache = functools.lru_cache(maxsize=2)
|
||||
>>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache)
|
||||
>>> a = MyClass()
|
||||
>>> a.method2()
|
||||
3
|
||||
|
||||
Caution - do not subsequently wrap the method with another decorator, such
|
||||
as ``@property``, which changes the semantics of the function.
|
||||
|
||||
See also
|
||||
http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
|
||||
for another implementation and additional justification.
|
||||
"""
|
||||
|
||||
def wrapper(self, *args, **kwargs):
|
||||
# it's the first call, replace the method with a cached, bound method
|
||||
bound_method = types.MethodType(method, self)
|
||||
cached_method = cache_wrapper(bound_method)
|
||||
setattr(self, method.__name__, cached_method)
|
||||
return cached_method(*args, **kwargs)
|
||||
|
||||
# Support cache clear even before cache has been created.
|
||||
wrapper.cache_clear = lambda: None
|
||||
|
||||
return _special_method_cache(method, cache_wrapper) or wrapper
|
||||
|
||||
|
||||
def _special_method_cache(method, cache_wrapper):
|
||||
"""
|
||||
Because Python treats special methods differently, it's not
|
||||
possible to use instance attributes to implement the cached
|
||||
methods.
|
||||
|
||||
Instead, install the wrapper method under a different name
|
||||
and return a simple proxy to that wrapper.
|
||||
|
||||
https://github.com/jaraco/jaraco.functools/issues/5
|
||||
"""
|
||||
name = method.__name__
|
||||
special_names = '__getattr__', '__getitem__'
|
||||
|
||||
if name not in special_names:
|
||||
return None
|
||||
|
||||
wrapper_name = '__cached' + name
|
||||
|
||||
def proxy(self, /, *args, **kwargs):
|
||||
if wrapper_name not in vars(self):
|
||||
bound = types.MethodType(method, self)
|
||||
cache = cache_wrapper(bound)
|
||||
setattr(self, wrapper_name, cache)
|
||||
else:
|
||||
cache = getattr(self, wrapper_name)
|
||||
return cache(*args, **kwargs)
|
||||
|
||||
return proxy
|
||||
|
||||
|
||||
def apply(transform):
|
||||
"""
|
||||
Decorate a function with a transform function that is
|
||||
invoked on results returned from the decorated function.
|
||||
|
||||
>>> @apply(reversed)
|
||||
... def get_numbers(start):
|
||||
... "doc for get_numbers"
|
||||
... return range(start, start+3)
|
||||
>>> list(get_numbers(4))
|
||||
[6, 5, 4]
|
||||
>>> get_numbers.__doc__
|
||||
'doc for get_numbers'
|
||||
"""
|
||||
|
||||
def wrap(func):
|
||||
return functools.wraps(func)(compose(transform, func))
|
||||
|
||||
return wrap
|
||||
|
||||
|
||||
def result_invoke(action):
|
||||
r"""
|
||||
Decorate a function with an action function that is
|
||||
invoked on the results returned from the decorated
|
||||
function (for its side effect), then return the original
|
||||
result.
|
||||
|
||||
>>> @result_invoke(print)
|
||||
... def add_two(a, b):
|
||||
... return a + b
|
||||
>>> x = add_two(2, 3)
|
||||
5
|
||||
>>> x
|
||||
5
|
||||
"""
|
||||
|
||||
def wrap(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
result = func(*args, **kwargs)
|
||||
action(result)
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
return wrap
|
||||
|
||||
|
||||
def invoke(f, /, *args, **kwargs):
|
||||
"""
|
||||
Call a function for its side effect after initialization.
|
||||
|
||||
The benefit of using the decorator instead of simply invoking a function
|
||||
after defining it is that it makes explicit the author's intent for the
|
||||
function to be called immediately. Whereas if one simply calls the
|
||||
function immediately, it's less obvious if that was intentional or
|
||||
incidental. It also avoids repeating the name - the two actions, defining
|
||||
the function and calling it immediately are modeled separately, but linked
|
||||
by the decorator construct.
|
||||
|
||||
The benefit of having a function construct (opposed to just invoking some
|
||||
behavior inline) is to serve as a scope in which the behavior occurs. It
|
||||
avoids polluting the global namespace with local variables, provides an
|
||||
anchor on which to attach documentation (docstring), keeps the behavior
|
||||
logically separated (instead of conceptually separated or not separated at
|
||||
all), and provides potential to re-use the behavior for testing or other
|
||||
purposes.
|
||||
|
||||
This function is named as a pithy way to communicate, "call this function
|
||||
primarily for its side effect", or "while defining this function, also
|
||||
take it aside and call it". It exists because there's no Python construct
|
||||
for "define and call" (nor should there be, as decorators serve this need
|
||||
just fine). The behavior happens immediately and synchronously.
|
||||
|
||||
>>> @invoke
|
||||
... def func(): print("called")
|
||||
called
|
||||
>>> func()
|
||||
called
|
||||
|
||||
Use functools.partial to pass parameters to the initial call
|
||||
|
||||
>>> @functools.partial(invoke, name='bingo')
|
||||
... def func(name): print('called with', name)
|
||||
called with bingo
|
||||
"""
|
||||
f(*args, **kwargs)
|
||||
return f
|
||||
|
||||
|
||||
class Throttler:
|
||||
"""Rate-limit a function (or other callable)."""
|
||||
|
||||
def __init__(self, func, max_rate=float('Inf')):
|
||||
if isinstance(func, Throttler):
|
||||
func = func.func
|
||||
self.func = func
|
||||
self.max_rate = max_rate
|
||||
self.reset()
|
||||
|
||||
def reset(self):
|
||||
self.last_called = 0
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
self._wait()
|
||||
return self.func(*args, **kwargs)
|
||||
|
||||
def _wait(self):
|
||||
"""Ensure at least 1/max_rate seconds from last call."""
|
||||
elapsed = time.time() - self.last_called
|
||||
must_wait = 1 / self.max_rate - elapsed
|
||||
time.sleep(max(0, must_wait))
|
||||
self.last_called = time.time()
|
||||
|
||||
def __get__(self, obj, owner=None):
|
||||
return first_invoke(self._wait, functools.partial(self.func, obj))
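
# A minimal sketch of Throttler usage; the wrapped callable and rate below are
# illustrative assumptions. Calls beyond max_rate per second block in _wait.
def _demo_throttler():
    limited_print = Throttler(print, max_rate=2)
    for n in range(4):
        # sleeps roughly 0.5s between iterations to honor max_rate
        limited_print(n)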
|
||||
|
||||
|
||||
def first_invoke(func1, func2):
|
||||
"""
|
||||
Return a function that when invoked will invoke func1 without
|
||||
any parameters (for its side effect) and then invoke func2
|
||||
with whatever parameters were passed, returning its result.
|
||||
"""
|
||||
|
||||
def wrapper(*args, **kwargs):
|
||||
func1()
|
||||
return func2(*args, **kwargs)
|
||||
|
||||
return wrapper
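
# A minimal sketch of first_invoke; the lambda and the use of ``len`` below
# are illustrative only.
def _demo_first_invoke():
    """
    >>> logged_len = first_invoke(lambda: print('measuring'), len)
    >>> logged_len('abc')
    measuring
    3
    """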
|
||||
|
||||
|
||||
method_caller = first_invoke(
|
||||
lambda: warnings.warn(
|
||||
'`jaraco.functools.method_caller` is deprecated, '
|
||||
'use `operator.methodcaller` instead',
|
||||
DeprecationWarning,
|
||||
stacklevel=3,
|
||||
),
|
||||
operator.methodcaller,
|
||||
)
|
||||
|
||||
|
||||
def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
|
||||
"""
|
||||
Given a callable func, trap the indicated exceptions
|
||||
for up to 'retries' times, invoking cleanup on the
|
||||
exception. On the final attempt, allow any exceptions
|
||||
to propagate.
|
||||
"""
|
||||
attempts = itertools.count() if retries == float('inf') else range(retries)
|
||||
for _ in attempts:
|
||||
try:
|
||||
return func()
|
||||
except trap:
|
||||
cleanup()
|
||||
|
||||
return func()
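
# A minimal sketch of retry_call; the flaky callable below is an illustrative
# assumption. Two failures are trapped and retried, the final attempt succeeds.
def _demo_retry_call():
    """
    >>> attempts = iter([ValueError, ValueError, 'ok'])
    >>> def flaky():
    ...     outcome = next(attempts)
    ...     if outcome is ValueError:
    ...         raise outcome
    ...     return outcome
    >>> retry_call(flaky, retries=2, trap=ValueError)
    'ok'
    """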
|
||||
|
||||
|
||||
def retry(*r_args, **r_kwargs):
|
||||
"""
|
||||
Decorator wrapper for retry_call. Accepts arguments to retry_call
|
||||
except func and then returns a decorator for the decorated function.
|
||||
|
||||
Ex:
|
||||
|
||||
>>> @retry(retries=3)
|
||||
... def my_func(a, b):
|
||||
... "this is my funk"
|
||||
... print(a, b)
|
||||
>>> my_func.__doc__
|
||||
'this is my funk'
|
||||
"""
|
||||
|
||||
def decorate(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*f_args, **f_kwargs):
|
||||
bound = functools.partial(func, *f_args, **f_kwargs)
|
||||
return retry_call(bound, *r_args, **r_kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorate
|
||||
|
||||
|
||||
def print_yielded(func):
|
||||
"""
|
||||
Convert a generator into a function that prints all yielded elements.
|
||||
|
||||
>>> @print_yielded
|
||||
... def x():
|
||||
... yield 3; yield None
|
||||
>>> x()
|
||||
3
|
||||
None
|
||||
"""
|
||||
print_all = functools.partial(map, print)
|
||||
print_results = compose(more_itertools.consume, print_all, func)
|
||||
return functools.wraps(func)(print_results)
|
||||
|
||||
|
||||
def pass_none(func):
|
||||
"""
|
||||
Wrap func so it's not called if its first param is None.
|
||||
|
||||
>>> print_text = pass_none(print)
|
||||
>>> print_text('text')
|
||||
text
|
||||
>>> print_text(None)
|
||||
"""
|
||||
|
||||
@functools.wraps(func)
|
||||
def wrapper(param, /, *args, **kwargs):
|
||||
if param is not None:
|
||||
return func(param, *args, **kwargs)
|
||||
return None
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def assign_params(func, namespace):
|
||||
"""
|
||||
Assign parameters from namespace where func solicits.
|
||||
|
||||
>>> def func(x, y=3):
|
||||
... print(x, y)
|
||||
>>> assigned = assign_params(func, dict(x=2, z=4))
|
||||
>>> assigned()
|
||||
2 3
|
||||
|
||||
The usual errors are raised if a function doesn't receive
|
||||
its required parameters:
|
||||
|
||||
>>> assigned = assign_params(func, dict(y=3, z=4))
|
||||
>>> assigned()
|
||||
Traceback (most recent call last):
|
||||
TypeError: func() ...argument...
|
||||
|
||||
It even works on methods:
|
||||
|
||||
>>> class Handler:
|
||||
... def meth(self, arg):
|
||||
... print(arg)
|
||||
>>> assign_params(Handler().meth, dict(arg='crystal', foo='clear'))()
|
||||
crystal
|
||||
"""
|
||||
sig = inspect.signature(func)
|
||||
params = sig.parameters.keys()
|
||||
call_ns = {k: namespace[k] for k in params if k in namespace}
|
||||
return functools.partial(func, **call_ns)
|
||||
|
||||
|
||||
def save_method_args(method):
|
||||
"""
|
||||
Wrap a method such that when it is called, the args and kwargs are
|
||||
saved on the method.
|
||||
|
||||
>>> class MyClass:
|
||||
... @save_method_args
|
||||
... def method(self, a, b):
|
||||
... print(a, b)
|
||||
>>> my_ob = MyClass()
|
||||
>>> my_ob.method(1, 2)
|
||||
1 2
|
||||
>>> my_ob._saved_method.args
|
||||
(1, 2)
|
||||
>>> my_ob._saved_method.kwargs
|
||||
{}
|
||||
>>> my_ob.method(a=3, b='foo')
|
||||
3 foo
|
||||
>>> my_ob._saved_method.args
|
||||
()
|
||||
>>> my_ob._saved_method.kwargs == dict(a=3, b='foo')
|
||||
True
|
||||
|
||||
The arguments are stored on the instance, allowing for
|
||||
different instances to save different args.
|
||||
|
||||
>>> your_ob = MyClass()
|
||||
>>> your_ob.method({str('x'): 3}, b=[4])
|
||||
{'x': 3} [4]
|
||||
>>> your_ob._saved_method.args
|
||||
({'x': 3},)
|
||||
>>> my_ob._saved_method.args
|
||||
()
|
||||
"""
|
||||
args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs')
|
||||
|
||||
@functools.wraps(method)
|
||||
def wrapper(self, /, *args, **kwargs):
|
||||
attr_name = '_saved_' + method.__name__
|
||||
attr = args_and_kwargs(args, kwargs)
|
||||
setattr(self, attr_name, attr)
|
||||
return method(self, *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def except_(*exceptions, replace=None, use=None):
|
||||
"""
|
||||
Replace the indicated exceptions, if raised, with the indicated
|
||||
literal replacement or evaluated expression (if present).
|
||||
|
||||
>>> safe_int = except_(ValueError)(int)
|
||||
>>> safe_int('five')
|
||||
>>> safe_int('5')
|
||||
5
|
||||
|
||||
Specify a literal replacement with ``replace``.
|
||||
|
||||
>>> safe_int_r = except_(ValueError, replace=0)(int)
|
||||
>>> safe_int_r('five')
|
||||
0
|
||||
|
||||
Provide an expression to ``use`` to pass through particular parameters.
|
||||
|
||||
>>> safe_int_pt = except_(ValueError, use='args[0]')(int)
|
||||
>>> safe_int_pt('five')
|
||||
'five'
|
||||
|
||||
"""
|
||||
|
||||
def decorate(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except exceptions:
|
||||
try:
|
||||
return eval(use)
|
||||
except TypeError:
|
||||
return replace
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorate
|
||||
|
||||
|
||||
def identity(x):
|
||||
"""
|
||||
Return the argument.
|
||||
|
||||
>>> o = object()
|
||||
>>> identity(o) is o
|
||||
True
|
||||
"""
|
||||
return x
|
||||
|
||||
|
||||
def bypass_when(check, *, _op=identity):
|
||||
"""
|
||||
Decorate a function to return its parameter when ``check``.
|
||||
|
||||
>>> bypassed = [] # False
|
||||
|
||||
>>> @bypass_when(bypassed)
|
||||
... def double(x):
|
||||
... return x * 2
|
||||
>>> double(2)
|
||||
4
|
||||
>>> bypassed[:] = [object()] # True
|
||||
>>> double(2)
|
||||
2
|
||||
"""
|
||||
|
||||
def decorate(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(param, /):
|
||||
return param if _op(check) else func(param)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorate
|
||||
|
||||
|
||||
def bypass_unless(check):
|
||||
"""
|
||||
Decorate a function to return its parameter unless ``check``.
|
||||
|
||||
>>> enabled = [object()] # True
|
||||
|
||||
>>> @bypass_unless(enabled)
|
||||
... def double(x):
|
||||
... return x * 2
|
||||
>>> double(2)
|
||||
4
|
||||
>>> del enabled[:] # False
|
||||
>>> double(2)
|
||||
2
|
||||
"""
|
||||
return bypass_when(check, _op=operator.not_)
|
||||
|
||||
|
||||
@functools.singledispatch
|
||||
def _splat_inner(args, func):
|
||||
"""Splat args to func."""
|
||||
return func(*args)
|
||||
|
||||
|
||||
@_splat_inner.register
|
||||
def _(args: collections.abc.Mapping, func):
|
||||
"""Splat kargs to func as kwargs."""
|
||||
return func(**args)
|
||||
|
||||
|
||||
def splat(func):
|
||||
"""
|
||||
Wrap func to expect its parameters to be passed positionally in a tuple.
|
||||
|
||||
Has a similar effect to that of ``itertools.starmap`` over
|
||||
simple ``map``.
|
||||
|
||||
>>> pairs = [(-1, 1), (0, 2)]
|
||||
>>> pkg_resources.extern.more_itertools.consume(itertools.starmap(print, pairs))
|
||||
-1 1
|
||||
0 2
|
||||
>>> pkg_resources.extern.more_itertools.consume(map(splat(print), pairs))
|
||||
-1 1
|
||||
0 2
|
||||
|
||||
The approach generalizes to other iterators that don't have a "star"
|
||||
equivalent, such as a "starfilter".
|
||||
|
||||
>>> list(filter(splat(operator.add), pairs))
|
||||
[(0, 2)]
|
||||
|
||||
Splat also accepts a mapping argument.
|
||||
|
||||
>>> def is_nice(msg, code):
|
||||
... return "smile" in msg or code == 0
|
||||
>>> msgs = [
|
||||
... dict(msg='smile!', code=20),
|
||||
... dict(msg='error :(', code=1),
|
||||
... dict(msg='unknown', code=0),
|
||||
... ]
|
||||
>>> for msg in filter(splat(is_nice), msgs):
|
||||
... print(msg)
|
||||
{'msg': 'smile!', 'code': 20}
|
||||
{'msg': 'unknown', 'code': 0}
|
||||
"""
|
||||
return functools.wraps(func)(functools.partial(_splat_inner, func=func))
|
@ -1,128 +0,0 @@
|
||||
from collections.abc import Callable, Hashable, Iterator
|
||||
from functools import partial
|
||||
from operator import methodcaller
|
||||
import sys
|
||||
from typing import (
|
||||
Any,
|
||||
Generic,
|
||||
Protocol,
|
||||
TypeVar,
|
||||
overload,
|
||||
)
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
from typing import Concatenate, ParamSpec
|
||||
else:
|
||||
from typing_extensions import Concatenate, ParamSpec
|
||||
|
||||
_P = ParamSpec('_P')
|
||||
_R = TypeVar('_R')
|
||||
_T = TypeVar('_T')
|
||||
_R1 = TypeVar('_R1')
|
||||
_R2 = TypeVar('_R2')
|
||||
_V = TypeVar('_V')
|
||||
_S = TypeVar('_S')
|
||||
_R_co = TypeVar('_R_co', covariant=True)
|
||||
|
||||
class _OnceCallable(Protocol[_P, _R]):
|
||||
saved_result: _R
|
||||
reset: Callable[[], None]
|
||||
def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ...
|
||||
|
||||
class _ProxyMethodCacheWrapper(Protocol[_R_co]):
|
||||
cache_clear: Callable[[], None]
|
||||
def __call__(self, *args: Hashable, **kwargs: Hashable) -> _R_co: ...
|
||||
|
||||
class _MethodCacheWrapper(Protocol[_R_co]):
|
||||
def cache_clear(self) -> None: ...
|
||||
def __call__(self, *args: Hashable, **kwargs: Hashable) -> _R_co: ...
|
||||
|
||||
# `compose()` overloads below will cover most use cases.
|
||||
|
||||
@overload
|
||||
def compose(
|
||||
__func1: Callable[[_R], _T],
|
||||
__func2: Callable[_P, _R],
|
||||
/,
|
||||
) -> Callable[_P, _T]: ...
|
||||
@overload
|
||||
def compose(
|
||||
__func1: Callable[[_R], _T],
|
||||
__func2: Callable[[_R1], _R],
|
||||
__func3: Callable[_P, _R1],
|
||||
/,
|
||||
) -> Callable[_P, _T]: ...
|
||||
@overload
|
||||
def compose(
|
||||
__func1: Callable[[_R], _T],
|
||||
__func2: Callable[[_R2], _R],
|
||||
__func3: Callable[[_R1], _R2],
|
||||
__func4: Callable[_P, _R1],
|
||||
/,
|
||||
) -> Callable[_P, _T]: ...
|
||||
def once(func: Callable[_P, _R]) -> _OnceCallable[_P, _R]: ...
|
||||
def method_cache(
|
||||
method: Callable[..., _R],
|
||||
cache_wrapper: Callable[[Callable[..., _R]], _MethodCacheWrapper[_R]] = ...,
|
||||
) -> _MethodCacheWrapper[_R] | _ProxyMethodCacheWrapper[_R]: ...
|
||||
def apply(
|
||||
transform: Callable[[_R], _T]
|
||||
) -> Callable[[Callable[_P, _R]], Callable[_P, _T]]: ...
|
||||
def result_invoke(
|
||||
action: Callable[[_R], Any]
|
||||
) -> Callable[[Callable[_P, _R]], Callable[_P, _R]]: ...
|
||||
def invoke(
|
||||
f: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs
|
||||
) -> Callable[_P, _R]: ...
|
||||
def call_aside(
|
||||
f: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs
|
||||
) -> Callable[_P, _R]: ...
|
||||
|
||||
class Throttler(Generic[_R]):
|
||||
last_called: float
|
||||
func: Callable[..., _R]
|
||||
max_rate: float
|
||||
def __init__(
|
||||
self, func: Callable[..., _R] | Throttler[_R], max_rate: float = ...
|
||||
) -> None: ...
|
||||
def reset(self) -> None: ...
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> _R: ...
|
||||
def __get__(self, obj: Any, owner: type[Any] | None = ...) -> Callable[..., _R]: ...
|
||||
|
||||
def first_invoke(
|
||||
func1: Callable[..., Any], func2: Callable[_P, _R]
|
||||
) -> Callable[_P, _R]: ...
|
||||
|
||||
method_caller: Callable[..., methodcaller]
|
||||
|
||||
def retry_call(
|
||||
func: Callable[..., _R],
|
||||
cleanup: Callable[..., None] = ...,
|
||||
retries: int | float = ...,
|
||||
trap: type[BaseException] | tuple[type[BaseException], ...] = ...,
|
||||
) -> _R: ...
|
||||
def retry(
|
||||
cleanup: Callable[..., None] = ...,
|
||||
retries: int | float = ...,
|
||||
trap: type[BaseException] | tuple[type[BaseException], ...] = ...,
|
||||
) -> Callable[[Callable[..., _R]], Callable[..., _R]]: ...
|
||||
def print_yielded(func: Callable[_P, Iterator[Any]]) -> Callable[_P, None]: ...
|
||||
def pass_none(
|
||||
func: Callable[Concatenate[_T, _P], _R]
|
||||
) -> Callable[Concatenate[_T, _P], _R]: ...
|
||||
def assign_params(
|
||||
func: Callable[..., _R], namespace: dict[str, Any]
|
||||
) -> partial[_R]: ...
|
||||
def save_method_args(
|
||||
method: Callable[Concatenate[_S, _P], _R]
|
||||
) -> Callable[Concatenate[_S, _P], _R]: ...
|
||||
def except_(
|
||||
*exceptions: type[BaseException], replace: Any = ..., use: Any = ...
|
||||
) -> Callable[[Callable[_P, Any]], Callable[_P, Any]]: ...
|
||||
def identity(x: _T) -> _T: ...
|
||||
def bypass_when(
|
||||
check: _V, *, _op: Callable[[_V], Any] = ...
|
||||
) -> Callable[[Callable[[_T], _R]], Callable[[_T], _T | _R]]: ...
|
||||
def bypass_unless(
|
||||
check: Any,
|
||||
) -> Callable[[Callable[[_T], _R]], Callable[[_T], _T | _R]]: ...
|
@ -1,599 +0,0 @@
|
||||
import re
|
||||
import itertools
|
||||
import textwrap
|
||||
import functools
|
||||
|
||||
try:
|
||||
from importlib.resources import files # type: ignore
|
||||
except ImportError: # pragma: nocover
|
||||
from pkg_resources.extern.importlib_resources import files # type: ignore
|
||||
|
||||
from pkg_resources.extern.jaraco.functools import compose, method_cache
|
||||
from pkg_resources.extern.jaraco.context import ExceptionTrap
|
||||
|
||||
|
||||
def substitution(old, new):
|
||||
"""
|
||||
Return a function that will perform a substitution on a string.
|
||||
"""
|
||||
return lambda s: s.replace(old, new)
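
# A minimal sketch of substitution; the replacement values are illustrative.
def _demo_substitution():
    """
    >>> to_underscores = substitution(' ', '_')
    >>> to_underscores('hello world')
    'hello_world'
    """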
|
||||
|
||||
|
||||
def multi_substitution(*substitutions):
|
||||
"""
|
||||
Take a sequence of pairs specifying substitutions, and create
|
||||
a function that performs those substitutions.
|
||||
|
||||
>>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo')
|
||||
'baz'
|
||||
"""
|
||||
substitutions = itertools.starmap(substitution, substitutions)
|
||||
# compose function applies last function first, so reverse the
|
||||
# substitutions to get the expected order.
|
||||
substitutions = reversed(tuple(substitutions))
|
||||
return compose(*substitutions)
|
||||
|
||||
|
||||
class FoldedCase(str):
|
||||
"""
|
||||
A case insensitive string class; behaves just like str
|
||||
except compares equal when the only variation is case.
|
||||
|
||||
>>> s = FoldedCase('hello world')
|
||||
|
||||
>>> s == 'Hello World'
|
||||
True
|
||||
|
||||
>>> 'Hello World' == s
|
||||
True
|
||||
|
||||
>>> s != 'Hello World'
|
||||
False
|
||||
|
||||
>>> s.index('O')
|
||||
4
|
||||
|
||||
>>> s.split('O')
|
||||
['hell', ' w', 'rld']
|
||||
|
||||
>>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
|
||||
['alpha', 'Beta', 'GAMMA']
|
||||
|
||||
Sequence membership is straightforward.
|
||||
|
||||
>>> "Hello World" in [s]
|
||||
True
|
||||
>>> s in ["Hello World"]
|
||||
True
|
||||
|
||||
You may test for set inclusion, but candidate and elements
|
||||
must both be folded.
|
||||
|
||||
>>> FoldedCase("Hello World") in {s}
|
||||
True
|
||||
>>> s in {FoldedCase("Hello World")}
|
||||
True
|
||||
|
||||
String inclusion works as long as the FoldedCase object
|
||||
is on the right.
|
||||
|
||||
>>> "hello" in FoldedCase("Hello World")
|
||||
True
|
||||
|
||||
But not if the FoldedCase object is on the left:
|
||||
|
||||
>>> FoldedCase('hello') in 'Hello World'
|
||||
False
|
||||
|
||||
In that case, use ``in_``:
|
||||
|
||||
>>> FoldedCase('hello').in_('Hello World')
|
||||
True
|
||||
|
||||
>>> FoldedCase('hello') > FoldedCase('Hello')
|
||||
False
|
||||
"""
|
||||
|
||||
def __lt__(self, other):
|
||||
return self.lower() < other.lower()
|
||||
|
||||
def __gt__(self, other):
|
||||
return self.lower() > other.lower()
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.lower() == other.lower()
|
||||
|
||||
def __ne__(self, other):
|
||||
return self.lower() != other.lower()
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.lower())
|
||||
|
||||
def __contains__(self, other):
|
||||
return super().lower().__contains__(other.lower())
|
||||
|
||||
def in_(self, other):
|
||||
"Does self appear in other?"
|
||||
return self in FoldedCase(other)
|
||||
|
||||
# cache lower since it's likely to be called frequently.
|
||||
@method_cache
|
||||
def lower(self):
|
||||
return super().lower()
|
||||
|
||||
def index(self, sub):
|
||||
return self.lower().index(sub.lower())
|
||||
|
||||
def split(self, splitter=' ', maxsplit=0):
|
||||
pattern = re.compile(re.escape(splitter), re.I)
|
||||
return pattern.split(self, maxsplit)
|
||||
|
||||
|
||||
# Python 3.8 compatibility
|
||||
_unicode_trap = ExceptionTrap(UnicodeDecodeError)
|
||||
|
||||
|
||||
@_unicode_trap.passes
|
||||
def is_decodable(value):
|
||||
r"""
|
||||
Return True if the supplied value is decodable (using the default
|
||||
encoding).
|
||||
|
||||
>>> is_decodable(b'\xff')
|
||||
False
|
||||
>>> is_decodable(b'\x32')
|
||||
True
|
||||
"""
|
||||
value.decode()
|
||||
|
||||
|
||||
def is_binary(value):
|
||||
r"""
|
||||
Return True if the value appears to be binary (that is, it's a byte
|
||||
string and isn't decodable).
|
||||
|
||||
>>> is_binary(b'\xff')
|
||||
True
|
||||
>>> is_binary('\xff')
|
||||
False
|
||||
"""
|
||||
return isinstance(value, bytes) and not is_decodable(value)
|
||||
|
||||
|
||||
def trim(s):
|
||||
r"""
|
||||
Trim something like a docstring to remove the whitespace that
|
||||
is common due to indentation and formatting.
|
||||
|
||||
>>> trim("\n\tfoo = bar\n\t\tbar = baz\n")
|
||||
'foo = bar\n\tbar = baz'
|
||||
"""
|
||||
return textwrap.dedent(s).strip()
|
||||
|
||||
|
||||
def wrap(s):
|
||||
"""
|
||||
Wrap lines of text, retaining existing newlines as
|
||||
paragraph markers.
|
||||
|
||||
>>> print(wrap(lorem_ipsum))
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do
|
||||
eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad
|
||||
minim veniam, quis nostrud exercitation ullamco laboris nisi ut
|
||||
aliquip ex ea commodo consequat. Duis aute irure dolor in
|
||||
reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla
|
||||
pariatur. Excepteur sint occaecat cupidatat non proident, sunt in
|
||||
culpa qui officia deserunt mollit anim id est laborum.
|
||||
<BLANKLINE>
|
||||
Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam
|
||||
varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus
|
||||
magna felis sollicitudin mauris. Integer in mauris eu nibh euismod
|
||||
gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis
|
||||
risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue,
|
||||
eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas
|
||||
fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla
|
||||
a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis,
|
||||
neque. Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing
|
||||
sapien, sed malesuada diam lacus eget erat. Cras mollis scelerisque
|
||||
nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus
|
||||
quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis,
|
||||
molestie eu, feugiat in, orci. In hac habitasse platea dictumst.
|
||||
"""
|
||||
paragraphs = s.splitlines()
|
||||
wrapped = ('\n'.join(textwrap.wrap(para)) for para in paragraphs)
|
||||
return '\n\n'.join(wrapped)
|
||||
|
||||
|
||||
def unwrap(s):
|
||||
r"""
|
||||
Given a multi-line string, return an unwrapped version.
|
||||
|
||||
>>> wrapped = wrap(lorem_ipsum)
|
||||
>>> wrapped.count('\n')
|
||||
20
|
||||
>>> unwrapped = unwrap(wrapped)
|
||||
>>> unwrapped.count('\n')
|
||||
1
|
||||
>>> print(unwrapped)
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing ...
|
||||
Curabitur pretium tincidunt lacus. Nulla gravida orci ...
|
||||
|
||||
"""
|
||||
paragraphs = re.split(r'\n\n+', s)
|
||||
cleaned = (para.replace('\n', ' ') for para in paragraphs)
|
||||
return '\n'.join(cleaned)
|
||||
|
||||
|
||||
|
||||
|
||||
class Splitter(object):
|
||||
"""object that will split a string with the given arguments for each call
|
||||
|
||||
>>> s = Splitter(',')
|
||||
>>> s('hello, world, this is your, master calling')
|
||||
['hello', ' world', ' this is your', ' master calling']
|
||||
"""
|
||||
|
||||
def __init__(self, *args):
|
||||
self.args = args
|
||||
|
||||
def __call__(self, s):
|
||||
return s.split(*self.args)
|
||||
|
||||
|
||||
def indent(string, prefix=' ' * 4):
|
||||
"""
|
||||
>>> indent('foo')
|
||||
' foo'
|
||||
"""
|
||||
return prefix + string
|
||||
|
||||
|
||||
class WordSet(tuple):
|
||||
"""
|
||||
Given an identifier, return the words that identifier represents,
|
||||
whether in camel case, underscore-separated, etc.
|
||||
|
||||
>>> WordSet.parse("camelCase")
|
||||
('camel', 'Case')
|
||||
|
||||
>>> WordSet.parse("under_sep")
|
||||
('under', 'sep')
|
||||
|
||||
Acronyms should be retained
|
||||
|
||||
>>> WordSet.parse("firstSNL")
|
||||
('first', 'SNL')
|
||||
|
||||
>>> WordSet.parse("you_and_I")
|
||||
('you', 'and', 'I')
|
||||
|
||||
>>> WordSet.parse("A simple test")
|
||||
('A', 'simple', 'test')
|
||||
|
||||
Multiple caps should not interfere with the first cap of another word.
|
||||
|
||||
>>> WordSet.parse("myABCClass")
|
||||
('my', 'ABC', 'Class')
|
||||
|
||||
The result is a WordSet, so you can get the form you need.
|
||||
|
||||
>>> WordSet.parse("myABCClass").underscore_separated()
|
||||
'my_ABC_Class'
|
||||
|
||||
>>> WordSet.parse('a-command').camel_case()
|
||||
'ACommand'
|
||||
|
||||
>>> WordSet.parse('someIdentifier').lowered().space_separated()
|
||||
'some identifier'
|
||||
|
||||
Slices of the result should return another WordSet.
|
||||
|
||||
>>> WordSet.parse('taken-out-of-context')[1:].underscore_separated()
|
||||
'out_of_context'
|
||||
|
||||
>>> WordSet.from_class_name(WordSet()).lowered().space_separated()
|
||||
'word set'
|
||||
|
||||
>>> example = WordSet.parse('figured it out')
|
||||
>>> example.headless_camel_case()
|
||||
'figuredItOut'
|
||||
>>> example.dash_separated()
|
||||
'figured-it-out'
|
||||
|
||||
"""
|
||||
|
||||
_pattern = re.compile('([A-Z]?[a-z]+)|([A-Z]+(?![a-z]))')
|
||||
|
||||
def capitalized(self):
|
||||
return WordSet(word.capitalize() for word in self)
|
||||
|
||||
def lowered(self):
|
||||
return WordSet(word.lower() for word in self)
|
||||
|
||||
def camel_case(self):
|
||||
return ''.join(self.capitalized())
|
||||
|
||||
def headless_camel_case(self):
|
||||
words = iter(self)
|
||||
first = next(words).lower()
|
||||
new_words = itertools.chain((first,), WordSet(words).camel_case())
|
||||
return ''.join(new_words)
|
||||
|
||||
def underscore_separated(self):
|
||||
return '_'.join(self)
|
||||
|
||||
def dash_separated(self):
|
||||
return '-'.join(self)
|
||||
|
||||
def space_separated(self):
|
||||
return ' '.join(self)
|
||||
|
||||
def trim_right(self, item):
|
||||
"""
|
||||
Remove the item from the end of the set.
|
||||
|
||||
>>> WordSet.parse('foo bar').trim_right('foo')
|
||||
('foo', 'bar')
|
||||
>>> WordSet.parse('foo bar').trim_right('bar')
|
||||
('foo',)
|
||||
>>> WordSet.parse('').trim_right('bar')
|
||||
()
|
||||
"""
|
||||
return self[:-1] if self and self[-1] == item else self
|
||||
|
||||
def trim_left(self, item):
|
||||
"""
|
||||
Remove the item from the beginning of the set.
|
||||
|
||||
>>> WordSet.parse('foo bar').trim_left('foo')
|
||||
('bar',)
|
||||
>>> WordSet.parse('foo bar').trim_left('bar')
|
||||
('foo', 'bar')
|
||||
>>> WordSet.parse('').trim_left('bar')
|
||||
()
|
||||
"""
|
||||
return self[1:] if self and self[0] == item else self
|
||||
|
||||
def trim(self, item):
|
||||
"""
|
||||
>>> WordSet.parse('foo bar').trim('foo')
|
||||
('bar',)
|
||||
"""
|
||||
return self.trim_left(item).trim_right(item)
|
||||
|
||||
def __getitem__(self, item):
|
||||
result = super(WordSet, self).__getitem__(item)
|
||||
if isinstance(item, slice):
|
||||
result = WordSet(result)
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
def parse(cls, identifier):
|
||||
matches = cls._pattern.finditer(identifier)
|
||||
return WordSet(match.group(0) for match in matches)
|
||||
|
||||
@classmethod
|
||||
def from_class_name(cls, subject):
|
||||
return cls.parse(subject.__class__.__name__)
|
||||
|
||||
|
||||
# for backward compatibility
|
||||
words = WordSet.parse
|
||||
|
||||
|
||||
def simple_html_strip(s):
|
||||
r"""
|
||||
Remove HTML from the string `s`.
|
||||
|
||||
>>> str(simple_html_strip(''))
|
||||
''
|
||||
|
||||
>>> print(simple_html_strip('A <bold>stormy</bold> day in paradise'))
|
||||
A stormy day in paradise
|
||||
|
||||
>>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.'))
|
||||
Somebody tell the truth.
|
||||
|
||||
>>> print(simple_html_strip('What about<br/>\nmultiple lines?'))
|
||||
What about
|
||||
multiple lines?
|
||||
"""
|
||||
html_stripper = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL)
|
||||
texts = (match.group(3) or '' for match in html_stripper.finditer(s))
|
||||
return ''.join(texts)
|
||||
|
||||
|
||||
class SeparatedValues(str):
|
||||
"""
|
||||
A string separated by a separator. Overrides __iter__ for getting
|
||||
the values.
|
||||
|
||||
>>> list(SeparatedValues('a,b,c'))
|
||||
['a', 'b', 'c']
|
||||
|
||||
Whitespace is stripped and empty values are discarded.
|
||||
|
||||
>>> list(SeparatedValues(' a, b , c, '))
|
||||
['a', 'b', 'c']
|
||||
"""
|
||||
|
||||
separator = ','
|
||||
|
||||
def __iter__(self):
|
||||
parts = self.split(self.separator)
|
||||
return filter(None, (part.strip() for part in parts))
|
||||
|
||||
|
||||
class Stripper:
|
||||
r"""
|
||||
Given a series of lines, find the common prefix and strip it from them.
|
||||
|
||||
>>> lines = [
|
||||
... 'abcdefg\n',
|
||||
... 'abc\n',
|
||||
... 'abcde\n',
|
||||
... ]
|
||||
>>> res = Stripper.strip_prefix(lines)
|
||||
>>> res.prefix
|
||||
'abc'
|
||||
>>> list(res.lines)
|
||||
['defg\n', '\n', 'de\n']
|
||||
|
||||
If no prefix is common, nothing should be stripped.
|
||||
|
||||
>>> lines = [
|
||||
... 'abcd\n',
|
||||
... '1234\n',
|
||||
... ]
|
||||
>>> res = Stripper.strip_prefix(lines)
|
||||
>>> res.prefix
''
|
||||
>>> list(res.lines)
|
||||
['abcd\n', '1234\n']
|
||||
"""
|
||||
|
||||
def __init__(self, prefix, lines):
|
||||
self.prefix = prefix
|
||||
self.lines = map(self, lines)
|
||||
|
||||
@classmethod
|
||||
def strip_prefix(cls, lines):
|
||||
prefix_lines, lines = itertools.tee(lines)
|
||||
prefix = functools.reduce(cls.common_prefix, prefix_lines)
|
||||
return cls(prefix, lines)
|
||||
|
||||
def __call__(self, line):
|
||||
if not self.prefix:
|
||||
return line
|
||||
null, prefix, rest = line.partition(self.prefix)
|
||||
return rest
|
||||
|
||||
@staticmethod
|
||||
def common_prefix(s1, s2):
|
||||
"""
|
||||
Return the common prefix of two lines.
|
||||
"""
|
||||
index = min(len(s1), len(s2))
|
||||
while s1[:index] != s2[:index]:
|
||||
index -= 1
|
||||
return s1[:index]
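
# A minimal sketch of Stripper.common_prefix; the inputs are illustrative.
def _demo_common_prefix():
    """
    >>> Stripper.common_prefix('abcdef', 'abcxyz')
    'abc'
    >>> Stripper.common_prefix('abc', 'xyz')
    ''
    """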
|
||||
|
||||
|
||||
def remove_prefix(text, prefix):
|
||||
"""
|
||||
Remove the prefix from the text if it exists.
|
||||
|
||||
>>> remove_prefix('underwhelming performance', 'underwhelming ')
|
||||
'performance'
|
||||
|
||||
>>> remove_prefix('something special', 'sample')
|
||||
'something special'
|
||||
"""
|
||||
null, prefix, rest = text.rpartition(prefix)
|
||||
return rest
|
||||
|
||||
|
||||
def remove_suffix(text, suffix):
|
||||
"""
|
||||
Remove the suffix from the text if it exists.
|
||||
|
||||
>>> remove_suffix('name.git', '.git')
|
||||
'name'
|
||||
|
||||
>>> remove_suffix('something special', 'sample')
|
||||
'something special'
|
||||
"""
|
||||
rest, suffix, null = text.partition(suffix)
|
||||
return rest
|
||||
|
||||
|
||||
def normalize_newlines(text):
|
||||
r"""
|
||||
Replace alternate newlines with the canonical newline.
|
||||
|
||||
>>> normalize_newlines('Lorem Ipsum\u2029')
|
||||
'Lorem Ipsum\n'
|
||||
>>> normalize_newlines('Lorem Ipsum\r\n')
|
||||
'Lorem Ipsum\n'
|
||||
>>> normalize_newlines('Lorem Ipsum\x85')
|
||||
'Lorem Ipsum\n'
|
||||
"""
|
||||
newlines = ['\r\n', '\r', '\n', '\u0085', '\u2028', '\u2029']
|
||||
pattern = '|'.join(newlines)
|
||||
return re.sub(pattern, '\n', text)
|
||||
|
||||
|
||||
def _nonblank(line):
    return line and not line.startswith('#')
|
||||
|
||||
|
||||
@functools.singledispatch
|
||||
def yield_lines(iterable):
|
||||
r"""
|
||||
Yield valid lines of a string or iterable.
|
||||
|
||||
>>> list(yield_lines(''))
|
||||
[]
|
||||
>>> list(yield_lines(['foo', 'bar']))
|
||||
['foo', 'bar']
|
||||
>>> list(yield_lines('foo\nbar'))
|
||||
['foo', 'bar']
|
||||
>>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
|
||||
['foo', 'baz #comment']
|
||||
>>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
|
||||
['foo', 'bar', 'baz', 'bing']
|
||||
"""
|
||||
return itertools.chain.from_iterable(map(yield_lines, iterable))
|
||||
|
||||
|
||||
@yield_lines.register(str)
|
||||
def _(text):
|
||||
return filter(_nonblank, map(str.strip, text.splitlines()))
|
||||
|
||||
|
||||
def drop_comment(line):
|
||||
"""
|
||||
Drop comments.
|
||||
|
||||
>>> drop_comment('foo # bar')
|
||||
'foo'
|
||||
|
||||
A hash without a space may be in a URL.
|
||||
|
||||
>>> drop_comment('http://example.com/foo#bar')
|
||||
'http://example.com/foo#bar'
|
||||
"""
|
||||
return line.partition(' #')[0]
|
||||
|
||||
|
||||
def join_continuation(lines):
|
||||
r"""
|
||||
Join lines continued by a trailing backslash.
|
||||
|
||||
>>> list(join_continuation(['foo \\', 'bar', 'baz']))
|
||||
['foobar', 'baz']
|
||||
>>> list(join_continuation(['foo \\', 'bar', 'baz']))
|
||||
['foobar', 'baz']
|
||||
>>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
|
||||
['foobarbaz']
|
||||
|
||||
Not sure why, but...
|
||||
The character preceding the backslash is also elided.
|
||||
|
||||
>>> list(join_continuation(['goo\\', 'dly']))
|
||||
['godly']
|
||||
|
||||
A terrible idea, but...
|
||||
If no line is available to continue, suppress the lines.
|
||||
|
||||
>>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
|
||||
['foo']
|
||||
"""
|
||||
lines = iter(lines)
|
||||
for item in lines:
|
||||
while item.endswith('\\'):
|
||||
try:
|
||||
item = item[:-2].strip() + next(lines)
|
||||
except StopIteration:
|
||||
return
|
||||
yield item
|
@ -1,6 +0,0 @@
|
||||
"""More routines for operating on iterables, beyond itertools"""
|
||||
|
||||
from .more import * # noqa
|
||||
from .recipes import * # noqa
|
||||
|
||||
__version__ = '10.2.0'
|
@ -1,2 +0,0 @@
|
||||
from .more import *
|
||||
from .recipes import *
|
File diff suppressed because it is too large
@ -1,695 +0,0 @@
|
||||
"""Stubs for more_itertools.more"""
|
||||
from __future__ import annotations
|
||||
|
||||
from types import TracebackType
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Container,
|
||||
ContextManager,
|
||||
Generic,
|
||||
Hashable,
|
||||
Iterable,
|
||||
Iterator,
|
||||
overload,
|
||||
Reversible,
|
||||
Sequence,
|
||||
Sized,
|
||||
Type,
|
||||
TypeVar,
|
||||
type_check_only,
|
||||
)
|
||||
from typing_extensions import Protocol
|
||||
|
||||
# Type and type variable definitions
|
||||
_T = TypeVar('_T')
|
||||
_T1 = TypeVar('_T1')
|
||||
_T2 = TypeVar('_T2')
|
||||
_U = TypeVar('_U')
|
||||
_V = TypeVar('_V')
|
||||
_W = TypeVar('_W')
|
||||
_T_co = TypeVar('_T_co', covariant=True)
|
||||
_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[Any]])
|
||||
_Raisable = BaseException | Type[BaseException]
|
||||
|
||||
@type_check_only
|
||||
class _SizedIterable(Protocol[_T_co], Sized, Iterable[_T_co]): ...
|
||||
|
||||
@type_check_only
|
||||
class _SizedReversible(Protocol[_T_co], Sized, Reversible[_T_co]): ...
|
||||
|
||||
@type_check_only
|
||||
class _SupportsSlicing(Protocol[_T_co]):
|
||||
def __getitem__(self, __k: slice) -> _T_co: ...
|
||||
|
||||
def chunked(
|
||||
iterable: Iterable[_T], n: int | None, strict: bool = ...
|
||||
) -> Iterator[list[_T]]: ...
|
||||
@overload
|
||||
def first(iterable: Iterable[_T]) -> _T: ...
|
||||
@overload
|
||||
def first(iterable: Iterable[_T], default: _U) -> _T | _U: ...
|
||||
@overload
|
||||
def last(iterable: Iterable[_T]) -> _T: ...
|
||||
@overload
|
||||
def last(iterable: Iterable[_T], default: _U) -> _T | _U: ...
|
||||
@overload
|
||||
def nth_or_last(iterable: Iterable[_T], n: int) -> _T: ...
|
||||
@overload
|
||||
def nth_or_last(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ...
|
||||
|
||||
class peekable(Generic[_T], Iterator[_T]):
|
||||
def __init__(self, iterable: Iterable[_T]) -> None: ...
|
||||
def __iter__(self) -> peekable[_T]: ...
|
||||
def __bool__(self) -> bool: ...
|
||||
@overload
|
||||
def peek(self) -> _T: ...
|
||||
@overload
|
||||
def peek(self, default: _U) -> _T | _U: ...
|
||||
def prepend(self, *items: _T) -> None: ...
|
||||
def __next__(self) -> _T: ...
|
||||
@overload
|
||||
def __getitem__(self, index: int) -> _T: ...
|
||||
@overload
|
||||
def __getitem__(self, index: slice) -> list[_T]: ...
|
||||
|
||||
def consumer(func: _GenFn) -> _GenFn: ...
|
||||
def ilen(iterable: Iterable[_T]) -> int: ...
|
||||
def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ...
|
||||
def with_iter(
|
||||
context_manager: ContextManager[Iterable[_T]],
|
||||
) -> Iterator[_T]: ...
|
||||
def one(
|
||||
iterable: Iterable[_T],
|
||||
too_short: _Raisable | None = ...,
|
||||
too_long: _Raisable | None = ...,
|
||||
) -> _T: ...
|
||||
def raise_(exception: _Raisable, *args: Any) -> None: ...
|
||||
def strictly_n(
|
||||
iterable: Iterable[_T],
|
||||
n: int,
|
||||
too_short: _GenFn | None = ...,
|
||||
too_long: _GenFn | None = ...,
|
||||
) -> list[_T]: ...
|
||||
def distinct_permutations(
|
||||
iterable: Iterable[_T], r: int | None = ...
|
||||
) -> Iterator[tuple[_T, ...]]: ...
|
||||
def intersperse(
|
||||
e: _U, iterable: Iterable[_T], n: int = ...
|
||||
) -> Iterator[_T | _U]: ...
|
||||
def unique_to_each(*iterables: Iterable[_T]) -> list[list[_T]]: ...
|
||||
@overload
|
||||
def windowed(
|
||||
seq: Iterable[_T], n: int, *, step: int = ...
|
||||
) -> Iterator[tuple[_T | None, ...]]: ...
|
||||
@overload
|
||||
def windowed(
|
||||
seq: Iterable[_T], n: int, fillvalue: _U, step: int = ...
|
||||
) -> Iterator[tuple[_T | _U, ...]]: ...
|
||||
def substrings(iterable: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
|
||||
def substrings_indexes(
|
||||
seq: Sequence[_T], reverse: bool = ...
|
||||
) -> Iterator[tuple[Sequence[_T], int, int]]: ...
|
||||
|
||||
class bucket(Generic[_T, _U], Container[_U]):
|
||||
def __init__(
|
||||
self,
|
||||
iterable: Iterable[_T],
|
||||
key: Callable[[_T], _U],
|
||||
validator: Callable[[_U], object] | None = ...,
|
||||
) -> None: ...
|
||||
def __contains__(self, value: object) -> bool: ...
|
||||
def __iter__(self) -> Iterator[_U]: ...
|
||||
def __getitem__(self, value: object) -> Iterator[_T]: ...
|
||||
|
||||
def spy(
|
||||
iterable: Iterable[_T], n: int = ...
|
||||
) -> tuple[list[_T], Iterator[_T]]: ...
|
||||
def interleave(*iterables: Iterable[_T]) -> Iterator[_T]: ...
|
||||
def interleave_longest(*iterables: Iterable[_T]) -> Iterator[_T]: ...
|
||||
def interleave_evenly(
|
||||
iterables: list[Iterable[_T]], lengths: list[int] | None = ...
|
||||
) -> Iterator[_T]: ...
|
||||
def collapse(
|
||||
iterable: Iterable[Any],
|
||||
base_type: type | None = ...,
|
||||
levels: int | None = ...,
|
||||
) -> Iterator[Any]: ...
|
||||
@overload
|
||||
def side_effect(
|
||||
func: Callable[[_T], object],
|
||||
iterable: Iterable[_T],
|
||||
chunk_size: None = ...,
|
||||
before: Callable[[], object] | None = ...,
|
||||
after: Callable[[], object] | None = ...,
|
||||
) -> Iterator[_T]: ...
|
||||
@overload
|
||||
def side_effect(
|
||||
func: Callable[[list[_T]], object],
|
||||
iterable: Iterable[_T],
|
||||
chunk_size: int,
|
||||
before: Callable[[], object] | None = ...,
|
||||
after: Callable[[], object] | None = ...,
|
||||
) -> Iterator[_T]: ...
|
||||
def sliced(
|
||||
seq: _SupportsSlicing[_T], n: int, strict: bool = ...
|
||||
) -> Iterator[_T]: ...
|
||||
def split_at(
|
||||
iterable: Iterable[_T],
|
||||
pred: Callable[[_T], object],
|
||||
maxsplit: int = ...,
|
||||
keep_separator: bool = ...,
|
||||
) -> Iterator[list[_T]]: ...
|
||||
def split_before(
|
||||
iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ...
|
||||
) -> Iterator[list[_T]]: ...
|
||||
def split_after(
|
||||
iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ...
|
||||
) -> Iterator[list[_T]]: ...
|
||||
def split_when(
|
||||
iterable: Iterable[_T],
|
||||
pred: Callable[[_T, _T], object],
|
||||
maxsplit: int = ...,
|
||||
) -> Iterator[list[_T]]: ...
|
||||
def split_into(
|
||||
iterable: Iterable[_T], sizes: Iterable[int | None]
|
||||
) -> Iterator[list[_T]]: ...
|
||||
@overload
|
||||
def padded(
|
||||
iterable: Iterable[_T],
|
||||
*,
|
||||
n: int | None = ...,
|
||||
next_multiple: bool = ...,
|
||||
) -> Iterator[_T | None]: ...
|
||||
@overload
|
||||
def padded(
|
||||
iterable: Iterable[_T],
|
||||
fillvalue: _U,
|
||||
n: int | None = ...,
|
||||
next_multiple: bool = ...,
|
||||
) -> Iterator[_T | _U]: ...
|
||||
@overload
|
||||
def repeat_last(iterable: Iterable[_T]) -> Iterator[_T]: ...
|
||||
@overload
|
||||
def repeat_last(iterable: Iterable[_T], default: _U) -> Iterator[_T | _U]: ...
|
||||
def distribute(n: int, iterable: Iterable[_T]) -> list[Iterator[_T]]: ...
|
||||
@overload
|
||||
def stagger(
|
||||
iterable: Iterable[_T],
|
||||
offsets: _SizedIterable[int] = ...,
|
||||
longest: bool = ...,
|
||||
) -> Iterator[tuple[_T | None, ...]]: ...
|
||||
@overload
|
||||
def stagger(
|
||||
iterable: Iterable[_T],
|
||||
offsets: _SizedIterable[int] = ...,
|
||||
longest: bool = ...,
|
||||
fillvalue: _U = ...,
|
||||
) -> Iterator[tuple[_T | _U, ...]]: ...
|
||||
|
||||
class UnequalIterablesError(ValueError):
|
||||
def __init__(self, details: tuple[int, int, int] | None = ...) -> None: ...
|
||||
|
||||
@overload
|
||||
def zip_equal(__iter1: Iterable[_T1]) -> Iterator[tuple[_T1]]: ...
|
||||
@overload
|
||||
def zip_equal(
|
||||
__iter1: Iterable[_T1], __iter2: Iterable[_T2]
|
||||
) -> Iterator[tuple[_T1, _T2]]: ...
|
||||
@overload
|
||||
def zip_equal(
|
||||
__iter1: Iterable[_T],
|
||||
__iter2: Iterable[_T],
|
||||
__iter3: Iterable[_T],
|
||||
*iterables: Iterable[_T],
|
||||
) -> Iterator[tuple[_T, ...]]: ...
|
||||
@overload
|
||||
def zip_offset(
|
||||
__iter1: Iterable[_T1],
|
||||
*,
|
||||
offsets: _SizedIterable[int],
|
||||
longest: bool = ...,
|
||||
fillvalue: None = None,
|
||||
) -> Iterator[tuple[_T1 | None]]: ...
|
||||
@overload
|
||||
def zip_offset(
|
||||
__iter1: Iterable[_T1],
|
||||
__iter2: Iterable[_T2],
|
||||
*,
|
||||
offsets: _SizedIterable[int],
|
||||
longest: bool = ...,
|
||||
fillvalue: None = None,
|
||||
) -> Iterator[tuple[_T1 | None, _T2 | None]]: ...
|
||||
@overload
|
||||
def zip_offset(
|
||||
__iter1: Iterable[_T],
|
||||
__iter2: Iterable[_T],
|
||||
__iter3: Iterable[_T],
|
||||
*iterables: Iterable[_T],
|
||||
offsets: _SizedIterable[int],
|
||||
longest: bool = ...,
|
||||
fillvalue: None = None,
|
||||
) -> Iterator[tuple[_T | None, ...]]: ...
|
||||
@overload
|
||||
def zip_offset(
|
||||
__iter1: Iterable[_T1],
|
||||
*,
|
||||
offsets: _SizedIterable[int],
|
||||
longest: bool = ...,
|
||||
fillvalue: _U,
|
||||
) -> Iterator[tuple[_T1 | _U]]: ...
|
||||
@overload
|
||||
def zip_offset(
|
||||
__iter1: Iterable[_T1],
|
||||
__iter2: Iterable[_T2],
|
||||
*,
|
||||
offsets: _SizedIterable[int],
|
||||
longest: bool = ...,
|
||||
fillvalue: _U,
|
||||
) -> Iterator[tuple[_T1 | _U, _T2 | _U]]: ...
|
||||
@overload
|
||||
def zip_offset(
|
||||
__iter1: Iterable[_T],
|
||||
__iter2: Iterable[_T],
|
||||
__iter3: Iterable[_T],
|
||||
*iterables: Iterable[_T],
|
||||
offsets: _SizedIterable[int],
|
||||
longest: bool = ...,
|
||||
fillvalue: _U,
|
||||
) -> Iterator[tuple[_T | _U, ...]]: ...
|
||||
def sort_together(
|
||||
iterables: Iterable[Iterable[_T]],
|
||||
key_list: Iterable[int] = ...,
|
||||
key: Callable[..., Any] | None = ...,
|
||||
reverse: bool = ...,
|
||||
) -> list[tuple[_T, ...]]: ...
|
||||
def unzip(iterable: Iterable[Sequence[_T]]) -> tuple[Iterator[_T], ...]: ...
|
||||
def divide(n: int, iterable: Iterable[_T]) -> list[Iterator[_T]]: ...
|
||||
def always_iterable(
|
||||
obj: object,
|
||||
base_type: type | tuple[type | tuple[Any, ...], ...] | None = ...,
|
||||
) -> Iterator[Any]: ...
|
||||
def adjacent(
|
||||
predicate: Callable[[_T], bool],
|
||||
iterable: Iterable[_T],
|
||||
distance: int = ...,
|
||||
) -> Iterator[tuple[bool, _T]]: ...
|
||||
@overload
|
||||
def groupby_transform(
|
||||
iterable: Iterable[_T],
|
||||
keyfunc: None = None,
|
||||
valuefunc: None = None,
|
||||
reducefunc: None = None,
|
||||
) -> Iterator[tuple[_T, Iterator[_T]]]: ...
|
||||
@overload
|
||||
def groupby_transform(
|
||||
iterable: Iterable[_T],
|
||||
keyfunc: Callable[[_T], _U],
|
||||
valuefunc: None,
|
||||
reducefunc: None,
|
||||
) -> Iterator[tuple[_U, Iterator[_T]]]: ...
|
||||
@overload
|
||||
def groupby_transform(
|
||||
iterable: Iterable[_T],
|
||||
keyfunc: None,
|
||||
valuefunc: Callable[[_T], _V],
|
||||
reducefunc: None,
|
||||
) -> Iterable[tuple[_T, Iterable[_V]]]: ...
|
||||
@overload
|
||||
def groupby_transform(
|
||||
iterable: Iterable[_T],
|
||||
keyfunc: Callable[[_T], _U],
|
||||
valuefunc: Callable[[_T], _V],
|
||||
reducefunc: None,
|
||||
) -> Iterable[tuple[_U, Iterator[_V]]]: ...
|
||||
@overload
|
||||
def groupby_transform(
|
||||
iterable: Iterable[_T],
|
||||
keyfunc: None,
|
||||
valuefunc: None,
|
||||
reducefunc: Callable[[Iterator[_T]], _W],
|
||||
) -> Iterable[tuple[_T, _W]]: ...
|
||||
@overload
|
||||
def groupby_transform(
|
||||
iterable: Iterable[_T],
|
||||
keyfunc: Callable[[_T], _U],
|
||||
valuefunc: None,
|
||||
reducefunc: Callable[[Iterator[_T]], _W],
|
||||
) -> Iterable[tuple[_U, _W]]: ...
|
||||
@overload
|
||||
def groupby_transform(
|
||||
iterable: Iterable[_T],
|
||||
keyfunc: None,
|
||||
valuefunc: Callable[[_T], _V],
|
||||
reducefunc: Callable[[Iterable[_V]], _W],
|
||||
) -> Iterable[tuple[_T, _W]]: ...
|
||||
@overload
|
||||
def groupby_transform(
|
||||
iterable: Iterable[_T],
|
||||
keyfunc: Callable[[_T], _U],
|
||||
valuefunc: Callable[[_T], _V],
|
||||
reducefunc: Callable[[Iterable[_V]], _W],
|
||||
) -> Iterable[tuple[_U, _W]]: ...
|
||||
|
||||
class numeric_range(Generic[_T, _U], Sequence[_T], Hashable, Reversible[_T]):
|
||||
@overload
|
||||
def __init__(self, __stop: _T) -> None: ...
|
||||
@overload
|
||||
def __init__(self, __start: _T, __stop: _T) -> None: ...
|
||||
@overload
|
||||
def __init__(self, __start: _T, __stop: _T, __step: _U) -> None: ...
|
||||
def __bool__(self) -> bool: ...
|
||||
def __contains__(self, elem: object) -> bool: ...
|
||||
def __eq__(self, other: object) -> bool: ...
|
||||
@overload
|
||||
def __getitem__(self, key: int) -> _T: ...
|
||||
@overload
|
||||
def __getitem__(self, key: slice) -> numeric_range[_T, _U]: ...
|
||||
def __hash__(self) -> int: ...
|
||||
def __iter__(self) -> Iterator[_T]: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __reduce__(
|
||||
self,
|
||||
) -> tuple[Type[numeric_range[_T, _U]], tuple[_T, _T, _U]]: ...
|
||||
def __repr__(self) -> str: ...
|
||||
def __reversed__(self) -> Iterator[_T]: ...
|
||||
def count(self, value: _T) -> int: ...
|
||||
def index(self, value: _T) -> int: ... # type: ignore
|
||||
|
||||
def count_cycle(
|
||||
iterable: Iterable[_T], n: int | None = ...
|
||||
) -> Iterable[tuple[int, _T]]: ...
|
||||
def mark_ends(
|
||||
iterable: Iterable[_T],
|
||||
) -> Iterable[tuple[bool, bool, _T]]: ...
|
||||
def locate(
|
||||
iterable: Iterable[_T],
|
||||
pred: Callable[..., Any] = ...,
|
||||
window_size: int | None = ...,
|
||||
) -> Iterator[int]: ...
|
||||
def lstrip(
|
||||
iterable: Iterable[_T], pred: Callable[[_T], object]
|
||||
) -> Iterator[_T]: ...
|
||||
def rstrip(
|
||||
iterable: Iterable[_T], pred: Callable[[_T], object]
|
||||
) -> Iterator[_T]: ...
|
||||
def strip(
|
||||
iterable: Iterable[_T], pred: Callable[[_T], object]
|
||||
) -> Iterator[_T]: ...
|
||||
|
||||
class islice_extended(Generic[_T], Iterator[_T]):
|
||||
def __init__(self, iterable: Iterable[_T], *args: int | None) -> None: ...
|
||||
def __iter__(self) -> islice_extended[_T]: ...
|
||||
def __next__(self) -> _T: ...
|
||||
def __getitem__(self, index: slice) -> islice_extended[_T]: ...
|
||||
|
||||
def always_reversible(iterable: Iterable[_T]) -> Iterator[_T]: ...
|
||||
def consecutive_groups(
|
||||
iterable: Iterable[_T], ordering: Callable[[_T], int] = ...
|
||||
) -> Iterator[Iterator[_T]]: ...
|
||||
@overload
|
||||
def difference(
|
||||
iterable: Iterable[_T],
|
||||
func: Callable[[_T, _T], _U] = ...,
|
||||
*,
|
||||
initial: None = ...,
|
||||
) -> Iterator[_T | _U]: ...
|
||||
@overload
|
||||
def difference(
|
||||
iterable: Iterable[_T], func: Callable[[_T, _T], _U] = ..., *, initial: _U
|
||||
) -> Iterator[_U]: ...
|
||||
|
||||
class SequenceView(Generic[_T], Sequence[_T]):
|
||||
def __init__(self, target: Sequence[_T]) -> None: ...
|
||||
@overload
|
||||
def __getitem__(self, index: int) -> _T: ...
|
||||
@overload
|
||||
def __getitem__(self, index: slice) -> Sequence[_T]: ...
|
||||
def __len__(self) -> int: ...
|
||||
|
||||
class seekable(Generic[_T], Iterator[_T]):
|
||||
def __init__(
|
||||
self, iterable: Iterable[_T], maxlen: int | None = ...
|
||||
) -> None: ...
|
||||
def __iter__(self) -> seekable[_T]: ...
|
||||
def __next__(self) -> _T: ...
|
||||
def __bool__(self) -> bool: ...
|
||||
@overload
|
||||
def peek(self) -> _T: ...
|
||||
@overload
|
||||
def peek(self, default: _U) -> _T | _U: ...
|
||||
def elements(self) -> SequenceView[_T]: ...
|
||||
def seek(self, index: int) -> None: ...
|
||||
def relative_seek(self, count: int) -> None: ...
|
||||
|
||||
class run_length:
|
||||
@staticmethod
|
||||
def encode(iterable: Iterable[_T]) -> Iterator[tuple[_T, int]]: ...
|
||||
@staticmethod
|
||||
def decode(iterable: Iterable[tuple[_T, int]]) -> Iterator[_T]: ...
|
||||
|
||||
def exactly_n(
|
||||
iterable: Iterable[_T], n: int, predicate: Callable[[_T], object] = ...
|
||||
) -> bool: ...
|
||||
def circular_shifts(iterable: Iterable[_T]) -> list[tuple[_T, ...]]: ...
|
||||
def make_decorator(
|
||||
wrapping_func: Callable[..., _U], result_index: int = ...
|
||||
) -> Callable[..., Callable[[Callable[..., Any]], Callable[..., _U]]]: ...
|
||||
@overload
|
||||
def map_reduce(
|
||||
iterable: Iterable[_T],
|
||||
keyfunc: Callable[[_T], _U],
|
||||
valuefunc: None = ...,
|
||||
reducefunc: None = ...,
|
||||
) -> dict[_U, list[_T]]: ...
|
||||
@overload
|
||||
def map_reduce(
|
||||
iterable: Iterable[_T],
|
||||
keyfunc: Callable[[_T], _U],
|
||||
valuefunc: Callable[[_T], _V],
|
||||
reducefunc: None = ...,
|
||||
) -> dict[_U, list[_V]]: ...
|
||||
@overload
|
||||
def map_reduce(
|
||||
iterable: Iterable[_T],
|
||||
keyfunc: Callable[[_T], _U],
|
||||
valuefunc: None = ...,
|
||||
reducefunc: Callable[[list[_T]], _W] = ...,
|
||||
) -> dict[_U, _W]: ...
|
||||
@overload
|
||||
def map_reduce(
|
||||
iterable: Iterable[_T],
|
||||
keyfunc: Callable[[_T], _U],
|
||||
valuefunc: Callable[[_T], _V],
|
||||
reducefunc: Callable[[list[_V]], _W],
|
||||
) -> dict[_U, _W]: ...
|
||||
def rlocate(
|
||||
iterable: Iterable[_T],
|
||||
pred: Callable[..., object] = ...,
|
||||
window_size: int | None = ...,
|
||||
) -> Iterator[int]: ...
|
||||
def replace(
|
||||
iterable: Iterable[_T],
|
||||
pred: Callable[..., object],
|
||||
substitutes: Iterable[_U],
|
||||
count: int | None = ...,
|
||||
window_size: int = ...,
|
||||
) -> Iterator[_T | _U]: ...
|
||||
def partitions(iterable: Iterable[_T]) -> Iterator[list[list[_T]]]: ...
|
||||
def set_partitions(
|
||||
iterable: Iterable[_T], k: int | None = ...
|
||||
) -> Iterator[list[list[_T]]]: ...
|
||||
|
||||
class time_limited(Generic[_T], Iterator[_T]):
|
||||
def __init__(
|
||||
self, limit_seconds: float, iterable: Iterable[_T]
|
||||
) -> None: ...
|
||||
def __iter__(self) -> islice_extended[_T]: ...
|
||||
def __next__(self) -> _T: ...
|
||||
|
||||
@overload
|
||||
def only(
|
||||
iterable: Iterable[_T], *, too_long: _Raisable | None = ...
|
||||
) -> _T | None: ...
|
||||
@overload
|
||||
def only(
|
||||
iterable: Iterable[_T], default: _U, too_long: _Raisable | None = ...
|
||||
) -> _T | _U: ...
|
||||
def ichunked(iterable: Iterable[_T], n: int) -> Iterator[Iterator[_T]]: ...
|
||||
def distinct_combinations(
|
||||
iterable: Iterable[_T], r: int
|
||||
) -> Iterator[tuple[_T, ...]]: ...
|
||||
def filter_except(
|
||||
validator: Callable[[Any], object],
|
||||
iterable: Iterable[_T],
|
||||
*exceptions: Type[BaseException],
|
||||
) -> Iterator[_T]: ...
|
||||
def map_except(
|
||||
function: Callable[[Any], _U],
|
||||
iterable: Iterable[_T],
|
||||
*exceptions: Type[BaseException],
|
||||
) -> Iterator[_U]: ...
|
||||
def map_if(
|
||||
iterable: Iterable[Any],
|
||||
pred: Callable[[Any], bool],
|
||||
func: Callable[[Any], Any],
|
||||
func_else: Callable[[Any], Any] | None = ...,
|
||||
) -> Iterator[Any]: ...
|
||||
def sample(
|
||||
iterable: Iterable[_T],
|
||||
k: int,
|
||||
weights: Iterable[float] | None = ...,
|
||||
) -> list[_T]: ...
|
||||
def is_sorted(
|
||||
iterable: Iterable[_T],
|
||||
key: Callable[[_T], _U] | None = ...,
|
||||
reverse: bool = False,
|
||||
strict: bool = False,
|
||||
) -> bool: ...
|
||||
|
||||
class AbortThread(BaseException):
|
||||
pass
|
||||
|
||||
class callback_iter(Generic[_T], Iterator[_T]):
|
||||
def __init__(
|
||||
self,
|
||||
func: Callable[..., Any],
|
||||
callback_kwd: str = ...,
|
||||
wait_seconds: float = ...,
|
||||
) -> None: ...
|
||||
def __enter__(self) -> callback_iter[_T]: ...
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: Type[BaseException] | None,
|
||||
exc_value: BaseException | None,
|
||||
traceback: TracebackType | None,
|
||||
) -> bool | None: ...
|
||||
def __iter__(self) -> callback_iter[_T]: ...
|
||||
def __next__(self) -> _T: ...
|
||||
def _reader(self) -> Iterator[_T]: ...
|
||||
@property
|
||||
def done(self) -> bool: ...
|
||||
@property
|
||||
def result(self) -> Any: ...
|
||||
|
||||
def windowed_complete(
|
||||
iterable: Iterable[_T], n: int
|
||||
) -> Iterator[tuple[_T, ...]]: ...
|
||||
def all_unique(
|
||||
iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
|
||||
) -> bool: ...
|
||||
def nth_product(index: int, *args: Iterable[_T]) -> tuple[_T, ...]: ...
|
||||
def nth_combination_with_replacement(
|
||||
iterable: Iterable[_T], r: int, index: int
|
||||
) -> tuple[_T, ...]: ...
|
||||
def nth_permutation(
|
||||
iterable: Iterable[_T], r: int, index: int
|
||||
) -> tuple[_T, ...]: ...
|
||||
def value_chain(*args: _T | Iterable[_T]) -> Iterable[_T]: ...
|
||||
def product_index(element: Iterable[_T], *args: Iterable[_T]) -> int: ...
|
||||
def combination_index(
|
||||
element: Iterable[_T], iterable: Iterable[_T]
|
||||
) -> int: ...
|
||||
def combination_with_replacement_index(
|
||||
element: Iterable[_T], iterable: Iterable[_T]
|
||||
) -> int: ...
|
||||
def permutation_index(
|
||||
element: Iterable[_T], iterable: Iterable[_T]
|
||||
) -> int: ...
|
||||
def repeat_each(iterable: Iterable[_T], n: int = ...) -> Iterator[_T]: ...
|
||||
|
||||
class countable(Generic[_T], Iterator[_T]):
|
||||
def __init__(self, iterable: Iterable[_T]) -> None: ...
|
||||
def __iter__(self) -> countable[_T]: ...
|
||||
def __next__(self) -> _T: ...
|
||||
|
||||
def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[list[_T]]: ...
|
||||
def zip_broadcast(
|
||||
*objects: _T | Iterable[_T],
|
||||
scalar_types: type | tuple[type | tuple[Any, ...], ...] | None = ...,
|
||||
strict: bool = ...,
|
||||
) -> Iterable[tuple[_T, ...]]: ...
|
||||
def unique_in_window(
|
||||
iterable: Iterable[_T], n: int, key: Callable[[_T], _U] | None = ...
|
||||
) -> Iterator[_T]: ...
|
||||
def duplicates_everseen(
|
||||
iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
|
||||
) -> Iterator[_T]: ...
|
||||
def duplicates_justseen(
|
||||
iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
|
||||
) -> Iterator[_T]: ...
|
||||
def classify_unique(
|
||||
iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
|
||||
) -> Iterator[tuple[_T, bool, bool]]: ...
|
||||
|
||||
class _SupportsLessThan(Protocol):
|
||||
def __lt__(self, __other: Any) -> bool: ...
|
||||
|
||||
_SupportsLessThanT = TypeVar("_SupportsLessThanT", bound=_SupportsLessThan)
|
||||
|
||||
@overload
|
||||
def minmax(
|
||||
iterable_or_value: Iterable[_SupportsLessThanT], *, key: None = None
|
||||
) -> tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
|
||||
@overload
|
||||
def minmax(
|
||||
iterable_or_value: Iterable[_T], *, key: Callable[[_T], _SupportsLessThan]
|
||||
) -> tuple[_T, _T]: ...
|
||||
@overload
|
||||
def minmax(
|
||||
iterable_or_value: Iterable[_SupportsLessThanT],
|
||||
*,
|
||||
key: None = None,
|
||||
default: _U,
|
||||
) -> _U | tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
|
||||
@overload
|
||||
def minmax(
|
||||
iterable_or_value: Iterable[_T],
|
||||
*,
|
||||
key: Callable[[_T], _SupportsLessThan],
|
||||
default: _U,
|
||||
) -> _U | tuple[_T, _T]: ...
|
||||
@overload
|
||||
def minmax(
|
||||
iterable_or_value: _SupportsLessThanT,
|
||||
__other: _SupportsLessThanT,
|
||||
*others: _SupportsLessThanT,
|
||||
) -> tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
|
||||
@overload
|
||||
def minmax(
|
||||
iterable_or_value: _T,
|
||||
__other: _T,
|
||||
*others: _T,
|
||||
key: Callable[[_T], _SupportsLessThan],
|
||||
) -> tuple[_T, _T]: ...
|
||||
def longest_common_prefix(
|
||||
iterables: Iterable[Iterable[_T]],
|
||||
) -> Iterator[_T]: ...
|
||||
def iequals(*iterables: Iterable[Any]) -> bool: ...
|
||||
def constrained_batches(
|
||||
iterable: Iterable[_T],
|
||||
max_size: int,
|
||||
max_count: int | None = ...,
|
||||
get_len: Callable[[_T], object] = ...,
|
||||
strict: bool = ...,
|
||||
) -> Iterator[tuple[_T]]: ...
|
||||
def gray_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
|
||||
def partial_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
|
||||
def takewhile_inclusive(
|
||||
predicate: Callable[[_T], bool], iterable: Iterable[_T]
|
||||
) -> Iterator[_T]: ...
|
||||
def outer_product(
|
||||
func: Callable[[_T, _U], _V],
|
||||
xs: Iterable[_T],
|
||||
ys: Iterable[_U],
|
||||
*args: Any,
|
||||
**kwargs: Any,
|
||||
) -> Iterator[tuple[_V, ...]]: ...
|
||||
def iter_suppress(
|
||||
iterable: Iterable[_T],
|
||||
*exceptions: Type[BaseException],
|
||||
) -> Iterator[_T]: ...
|
||||
def filter_map(
|
||||
func: Callable[[_T], _V | None],
|
||||
iterable: Iterable[_T],
|
||||
) -> Iterator[_V]: ...
|
File diff suppressed because it is too large
@ -1,128 +0,0 @@
|
||||
"""Stubs for more_itertools.recipes"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Iterable,
|
||||
Iterator,
|
||||
overload,
|
||||
Sequence,
|
||||
Type,
|
||||
TypeVar,
|
||||
)
|
||||
|
||||
# Type and type variable definitions
|
||||
_T = TypeVar('_T')
|
||||
_T1 = TypeVar('_T1')
|
||||
_T2 = TypeVar('_T2')
|
||||
_U = TypeVar('_U')
|
||||
|
||||
def take(n: int, iterable: Iterable[_T]) -> list[_T]: ...
|
||||
def tabulate(
|
||||
function: Callable[[int], _T], start: int = ...
|
||||
) -> Iterator[_T]: ...
|
||||
def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ...
|
||||
def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ...
|
||||
@overload
|
||||
def nth(iterable: Iterable[_T], n: int) -> _T | None: ...
|
||||
@overload
|
||||
def nth(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ...
|
||||
def all_equal(iterable: Iterable[_T]) -> bool: ...
|
||||
def quantify(
|
||||
iterable: Iterable[_T], pred: Callable[[_T], bool] = ...
|
||||
) -> int: ...
|
||||
def pad_none(iterable: Iterable[_T]) -> Iterator[_T | None]: ...
|
||||
def padnone(iterable: Iterable[_T]) -> Iterator[_T | None]: ...
|
||||
def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ...
|
||||
def dotproduct(vec1: Iterable[_T1], vec2: Iterable[_T2]) -> Any: ...
|
||||
def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ...
|
||||
def repeatfunc(
|
||||
func: Callable[..., _U], times: int | None = ..., *args: Any
|
||||
) -> Iterator[_U]: ...
|
||||
def pairwise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T]]: ...
|
||||
def grouper(
|
||||
iterable: Iterable[_T],
|
||||
n: int,
|
||||
incomplete: str = ...,
|
||||
fillvalue: _U = ...,
|
||||
) -> Iterator[tuple[_T | _U, ...]]: ...
|
||||
def roundrobin(*iterables: Iterable[_T]) -> Iterator[_T]: ...
|
||||
def partition(
|
||||
pred: Callable[[_T], object] | None, iterable: Iterable[_T]
|
||||
) -> tuple[Iterator[_T], Iterator[_T]]: ...
|
||||
def powerset(iterable: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
|
||||
def unique_everseen(
|
||||
iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
|
||||
) -> Iterator[_T]: ...
|
||||
def unique_justseen(
|
||||
iterable: Iterable[_T], key: Callable[[_T], object] | None = ...
|
||||
) -> Iterator[_T]: ...
|
||||
@overload
|
||||
def iter_except(
|
||||
func: Callable[[], _T],
|
||||
exception: Type[BaseException] | tuple[Type[BaseException], ...],
|
||||
first: None = ...,
|
||||
) -> Iterator[_T]: ...
|
||||
@overload
|
||||
def iter_except(
|
||||
func: Callable[[], _T],
|
||||
exception: Type[BaseException] | tuple[Type[BaseException], ...],
|
||||
first: Callable[[], _U],
|
||||
) -> Iterator[_T | _U]: ...
|
||||
@overload
|
||||
def first_true(
|
||||
iterable: Iterable[_T], *, pred: Callable[[_T], object] | None = ...
|
||||
) -> _T | None: ...
|
||||
@overload
|
||||
def first_true(
|
||||
iterable: Iterable[_T],
|
||||
default: _U,
|
||||
pred: Callable[[_T], object] | None = ...,
|
||||
) -> _T | _U: ...
|
||||
def random_product(
|
||||
*args: Iterable[_T], repeat: int = ...
|
||||
) -> tuple[_T, ...]: ...
|
||||
def random_permutation(
|
||||
iterable: Iterable[_T], r: int | None = ...
|
||||
) -> tuple[_T, ...]: ...
|
||||
def random_combination(iterable: Iterable[_T], r: int) -> tuple[_T, ...]: ...
|
||||
def random_combination_with_replacement(
|
||||
iterable: Iterable[_T], r: int
|
||||
) -> tuple[_T, ...]: ...
|
||||
def nth_combination(
|
||||
iterable: Iterable[_T], r: int, index: int
|
||||
) -> tuple[_T, ...]: ...
|
||||
def prepend(value: _T, iterator: Iterable[_U]) -> Iterator[_T | _U]: ...
|
||||
def convolve(signal: Iterable[_T], kernel: Iterable[_T]) -> Iterator[_T]: ...
|
||||
def before_and_after(
|
||||
predicate: Callable[[_T], bool], it: Iterable[_T]
|
||||
) -> tuple[Iterator[_T], Iterator[_T]]: ...
|
||||
def triplewise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T, _T]]: ...
|
||||
def sliding_window(
|
||||
iterable: Iterable[_T], n: int
|
||||
) -> Iterator[tuple[_T, ...]]: ...
|
||||
def subslices(iterable: Iterable[_T]) -> Iterator[list[_T]]: ...
|
||||
def polynomial_from_roots(roots: Sequence[_T]) -> list[_T]: ...
|
||||
def iter_index(
|
||||
iterable: Iterable[_T],
|
||||
value: Any,
|
||||
start: int | None = ...,
|
||||
stop: int | None = ...,
|
||||
) -> Iterator[int]: ...
|
||||
def sieve(n: int) -> Iterator[int]: ...
|
||||
def batched(
|
||||
iterable: Iterable[_T], n: int, *, strict: bool = False
|
||||
) -> Iterator[tuple[_T]]: ...
|
||||
def transpose(
|
||||
it: Iterable[Iterable[_T]],
|
||||
) -> Iterator[tuple[_T, ...]]: ...
|
||||
def reshape(
|
||||
matrix: Iterable[Iterable[_T]], cols: int
|
||||
) -> Iterator[tuple[_T, ...]]: ...
|
||||
def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[tuple[_T]]: ...
|
||||
def factor(n: int) -> Iterator[int]: ...
|
||||
def polynomial_eval(coefficients: Sequence[_T], x: _U) -> _U: ...
|
||||
def sum_of_squares(it: Iterable[_T]) -> _T: ...
|
||||
def polynomial_derivative(coefficients: Sequence[_T]) -> list[_T]: ...
|
||||
def totient(n: int) -> int: ...
|
@ -1,342 +0,0 @@
|
||||
"""
|
||||
Utilities for determining application-specific dirs. See <https://github.com/platformdirs/platformdirs> for details and
|
||||
usage.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
if sys.version_info >= (3, 8): # pragma: no cover (py38+)
|
||||
from typing import Literal
|
||||
else: # pragma: no cover (py38+)
|
||||
from ..typing_extensions import Literal
|
||||
|
||||
from .api import PlatformDirsABC
|
||||
from .version import __version__
|
||||
from .version import __version_tuple__ as __version_info__
|
||||
|
||||
|
||||
def _set_platform_dir_class() -> type[PlatformDirsABC]:
|
||||
if sys.platform == "win32":
|
||||
from .windows import Windows as Result
|
||||
elif sys.platform == "darwin":
|
||||
from .macos import MacOS as Result
|
||||
else:
|
||||
from .unix import Unix as Result
|
||||
|
||||
if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
|
||||
|
||||
if os.getenv("SHELL") or os.getenv("PREFIX"):
|
||||
return Result
|
||||
|
||||
from .android import _android_folder
|
||||
|
||||
if _android_folder() is not None:
|
||||
from .android import Android
|
||||
|
||||
return Android # return to avoid redefinition of result
|
||||
|
||||
return Result
|
||||
|
||||
|
||||
PlatformDirs = _set_platform_dir_class() #: Currently active platform
|
||||
AppDirs = PlatformDirs #: Backwards compatibility with appdirs
|
||||
|
||||
|
||||
def user_data_dir(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
roaming: bool = False,
|
||||
) -> str:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
|
||||
:returns: data directory tied to the user
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_dir
|
||||
|
||||
|
||||
def site_data_dir(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
multipath: bool = False,
|
||||
) -> str:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
|
||||
:returns: data directory shared by users
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_dir
|
||||
|
||||
|
||||
def user_config_dir(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
roaming: bool = False,
|
||||
) -> str:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
|
||||
:returns: config directory tied to the user
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_dir
|
||||
|
||||
|
||||
def site_config_dir(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
multipath: bool = False,
|
||||
) -> str:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
|
||||
:returns: config directory shared by the users
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_dir
|
||||
|
||||
|
||||
def user_cache_dir(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
opinion: bool = True,
|
||||
) -> str:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
|
||||
:returns: cache directory tied to the user
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_dir
|
||||
|
||||
|
||||
def user_state_dir(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
roaming: bool = False,
|
||||
) -> str:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
|
||||
:returns: state directory tied to the user
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_dir
|
||||
|
||||
|
||||
def user_log_dir(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
opinion: bool = True,
|
||||
) -> str:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
|
||||
:returns: log directory tied to the user
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_dir
|
||||
|
||||
|
||||
def user_documents_dir() -> str:
|
||||
"""
|
||||
:returns: documents directory tied to the user
|
||||
"""
|
||||
return PlatformDirs().user_documents_dir
|
||||
|
||||
|
||||
def user_runtime_dir(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
opinion: bool = True,
|
||||
) -> str:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
|
||||
:returns: runtime directory tied to the user
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_runtime_dir
|
||||
|
||||
|
||||
def user_data_path(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
roaming: bool = False,
|
||||
) -> Path:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
|
||||
:returns: data path tied to the user
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_path
|
||||
|
||||
|
||||
def site_data_path(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
multipath: bool = False,
|
||||
) -> Path:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
|
||||
:returns: data path shared by users
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_path
|
||||
|
||||
|
||||
def user_config_path(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
roaming: bool = False,
|
||||
) -> Path:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
|
||||
:returns: config path tied to the user
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_path
|
||||
|
||||
|
||||
def site_config_path(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
multipath: bool = False,
|
||||
) -> Path:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
|
||||
:returns: config path shared by the users
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_path
|
||||
|
||||
|
||||
def user_cache_path(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
opinion: bool = True,
|
||||
) -> Path:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
|
||||
:returns: cache path tied to the user
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_path
|
||||
|
||||
|
||||
def user_state_path(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
roaming: bool = False,
|
||||
) -> Path:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
|
||||
:returns: state path tied to the user
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_path
|
||||
|
||||
|
||||
def user_log_path(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
opinion: bool = True,
|
||||
) -> Path:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
|
||||
:returns: log path tied to the user
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_path
|
||||
|
||||
|
||||
def user_documents_path() -> Path:
|
||||
"""
|
||||
:returns: documents path tied to the user
|
||||
"""
|
||||
return PlatformDirs().user_documents_path
|
||||
|
||||
|
||||
def user_runtime_path(
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
opinion: bool = True,
|
||||
) -> Path:
|
||||
"""
|
||||
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
||||
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
||||
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
|
||||
:returns: runtime path tied to the user
|
||||
"""
|
||||
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_runtime_path
|
||||
|
||||
|
||||
__all__ = [
|
||||
"__version__",
|
||||
"__version_info__",
|
||||
"PlatformDirs",
|
||||
"AppDirs",
|
||||
"PlatformDirsABC",
|
||||
"user_data_dir",
|
||||
"user_config_dir",
|
||||
"user_cache_dir",
|
||||
"user_state_dir",
|
||||
"user_log_dir",
|
||||
"user_documents_dir",
|
||||
"user_runtime_dir",
|
||||
"site_data_dir",
|
||||
"site_config_dir",
|
||||
"user_data_path",
|
||||
"user_config_path",
|
||||
"user_cache_path",
|
||||
"user_state_path",
|
||||
"user_log_path",
|
||||
"user_documents_path",
|
||||
"user_runtime_path",
|
||||
"site_data_path",
|
||||
"site_config_path",
|
||||
]
|
@ -1,46 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from platformdirs import PlatformDirs, __version__
|
||||
|
||||
PROPS = (
|
||||
"user_data_dir",
|
||||
"user_config_dir",
|
||||
"user_cache_dir",
|
||||
"user_state_dir",
|
||||
"user_log_dir",
|
||||
"user_documents_dir",
|
||||
"user_runtime_dir",
|
||||
"site_data_dir",
|
||||
"site_config_dir",
|
||||
)
|
||||
|
||||
|
||||
def main() -> None:
|
||||
app_name = "MyApp"
|
||||
app_author = "MyCompany"
|
||||
|
||||
print(f"-- platformdirs {__version__} --")
|
||||
|
||||
print("-- app dirs (with optional 'version')")
|
||||
dirs = PlatformDirs(app_name, app_author, version="1.0")
|
||||
for prop in PROPS:
|
||||
print(f"{prop}: {getattr(dirs, prop)}")
|
||||
|
||||
print("\n-- app dirs (without optional 'version')")
|
||||
dirs = PlatformDirs(app_name, app_author)
|
||||
for prop in PROPS:
|
||||
print(f"{prop}: {getattr(dirs, prop)}")
|
||||
|
||||
print("\n-- app dirs (without optional 'appauthor')")
|
||||
dirs = PlatformDirs(app_name)
|
||||
for prop in PROPS:
|
||||
print(f"{prop}: {getattr(dirs, prop)}")
|
||||
|
||||
print("\n-- app dirs (with disabled 'appauthor')")
|
||||
dirs = PlatformDirs(app_name, appauthor=False)
|
||||
for prop in PROPS:
|
||||
print(f"{prop}: {getattr(dirs, prop)}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@ -1,120 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from functools import lru_cache
|
||||
from typing import cast
|
||||
|
||||
from .api import PlatformDirsABC
|
||||
|
||||
|
||||
class Android(PlatformDirsABC):
|
||||
"""
|
||||
Follows the guidance `from here <https://android.stackexchange.com/a/216132>`_. Makes use of the
|
||||
`appname <platformdirs.api.PlatformDirsABC.appname>` and
|
||||
`version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
"""
|
||||
|
||||
@property
|
||||
def user_data_dir(self) -> str:
|
||||
""":return: data directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/files/<AppName>``"""
|
||||
return self._append_app_name_and_version(cast(str, _android_folder()), "files")
|
||||
|
||||
@property
|
||||
def site_data_dir(self) -> str:
|
||||
""":return: data directory shared by users, same as `user_data_dir`"""
|
||||
return self.user_data_dir
|
||||
|
||||
@property
|
||||
def user_config_dir(self) -> str:
|
||||
"""
|
||||
:return: config directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/shared_prefs/<AppName>``
|
||||
"""
|
||||
return self._append_app_name_and_version(cast(str, _android_folder()), "shared_prefs")
|
||||
|
||||
@property
|
||||
def site_config_dir(self) -> str:
|
||||
""":return: config directory shared by the users, same as `user_config_dir`"""
|
||||
return self.user_config_dir
|
||||
|
||||
@property
|
||||
def user_cache_dir(self) -> str:
|
||||
""":return: cache directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>``"""
|
||||
return self._append_app_name_and_version(cast(str, _android_folder()), "cache")
|
||||
|
||||
@property
|
||||
def user_state_dir(self) -> str:
|
||||
""":return: state directory tied to the user, same as `user_data_dir`"""
|
||||
return self.user_data_dir
|
||||
|
||||
@property
|
||||
def user_log_dir(self) -> str:
|
||||
"""
|
||||
:return: log directory tied to the user, same as `user_cache_dir` if not opinionated else ``log`` in it,
|
||||
e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/log``
|
||||
"""
|
||||
path = self.user_cache_dir
|
||||
if self.opinion:
|
||||
path = os.path.join(path, "log")
|
||||
return path
|
||||
|
||||
@property
|
||||
def user_documents_dir(self) -> str:
|
||||
"""
|
||||
:return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``
|
||||
"""
|
||||
return _android_documents_folder()
|
||||
|
||||
@property
|
||||
def user_runtime_dir(self) -> str:
|
||||
"""
|
||||
:return: runtime directory tied to the user, same as `user_cache_dir` if not opinionated else ``tmp`` in it,
|
||||
e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/tmp``
|
||||
"""
|
||||
path = self.user_cache_dir
|
||||
if self.opinion:
|
||||
path = os.path.join(path, "tmp")
|
||||
return path
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def _android_folder() -> str | None:
|
||||
""":return: base folder for the Android OS or None if it cannot be found"""
|
||||
try:
|
||||
# First try to get path to android app via pyjnius
|
||||
from jnius import autoclass
|
||||
|
||||
Context = autoclass("android.content.Context") # noqa: N806
|
||||
result: str | None = Context.getFilesDir().getParentFile().getAbsolutePath()
|
||||
except Exception:
|
||||
# if that fails, find an Android folder by looking for a matching path on sys.path
|
||||
pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
|
||||
for path in sys.path:
|
||||
if pattern.match(path):
|
||||
result = path.split("/files")[0]
|
||||
break
|
||||
else:
|
||||
result = None
|
||||
return result
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def _android_documents_folder() -> str:
|
||||
""":return: documents folder for the Android OS"""
|
||||
# Get directories with pyjnius
|
||||
try:
|
||||
from jnius import autoclass
|
||||
|
||||
Context = autoclass("android.content.Context") # noqa: N806
|
||||
Environment = autoclass("android.os.Environment") # noqa: N806
|
||||
documents_dir: str = Context.getExternalFilesDir(Environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
|
||||
except Exception:
|
||||
documents_dir = "/storage/emulated/0/Documents"
|
||||
|
||||
return documents_dir
|
||||
|
||||
|
||||
__all__ = [
|
||||
"Android",
|
||||
]
|
@ -1,156 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from abc import ABC, abstractmethod
|
||||
from pathlib import Path
|
||||
|
||||
if sys.version_info >= (3, 8): # pragma: no branch
|
||||
from typing import Literal # pragma: no cover
|
||||
|
||||
|
||||
class PlatformDirsABC(ABC):
|
||||
"""
|
||||
Abstract base class for platform directories.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
appname: str | None = None,
|
||||
appauthor: str | None | Literal[False] = None,
|
||||
version: str | None = None,
|
||||
roaming: bool = False,
|
||||
multipath: bool = False,
|
||||
opinion: bool = True,
|
||||
):
|
||||
"""
|
||||
Create a new platform directory.
|
||||
|
||||
:param appname: See `appname`.
|
||||
:param appauthor: See `appauthor`.
|
||||
:param version: See `version`.
|
||||
:param roaming: See `roaming`.
|
||||
:param multipath: See `multipath`.
|
||||
:param opinion: See `opinion`.
|
||||
"""
|
||||
self.appname = appname #: The name of the application.
|
||||
self.appauthor = appauthor
|
||||
"""
|
||||
The name of the app author or distributing body for this application. Typically, it is the owning company name.
|
||||
Defaults to `appname`. You may pass ``False`` to disable it.
|
||||
"""
|
||||
self.version = version
|
||||
"""
|
||||
An optional version path element to append to the path. You might want to use this if you want multiple versions
|
||||
of your app to be able to run independently. If used, this would typically be ``<major>.<minor>``.
|
||||
"""
|
||||
self.roaming = roaming
|
||||
"""
|
||||
Whether to use the roaming appdata directory on Windows. That means that for users on a Windows network setup
|
||||
for roaming profiles, this user data will be synced on login (see
|
||||
`here <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>`_).
|
||||
"""
|
||||
self.multipath = multipath
|
||||
"""
|
||||
An optional parameter only applicable to Unix/Linux which indicates that the entire list of data dirs should be
|
||||
returned. By default, only the first item is returned.
|
||||
"""
|
||||
self.opinion = opinion #: A flag indicating whether to use opinionated values.
|
||||
|
||||
def _append_app_name_and_version(self, *base: str) -> str:
|
||||
params = list(base[1:])
|
||||
if self.appname:
|
||||
params.append(self.appname)
|
||||
if self.version:
|
||||
params.append(self.version)
|
||||
return os.path.join(base[0], *params)
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def user_data_dir(self) -> str:
|
||||
""":return: data directory tied to the user"""
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def site_data_dir(self) -> str:
|
||||
""":return: data directory shared by users"""
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def user_config_dir(self) -> str:
|
||||
""":return: config directory tied to the user"""
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def site_config_dir(self) -> str:
|
||||
""":return: config directory shared by the users"""
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def user_cache_dir(self) -> str:
|
||||
""":return: cache directory tied to the user"""
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def user_state_dir(self) -> str:
|
||||
""":return: state directory tied to the user"""
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def user_log_dir(self) -> str:
|
||||
""":return: log directory tied to the user"""
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def user_documents_dir(self) -> str:
|
||||
""":return: documents directory tied to the user"""
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def user_runtime_dir(self) -> str:
|
||||
""":return: runtime directory tied to the user"""
|
||||
|
||||
@property
|
||||
def user_data_path(self) -> Path:
|
||||
""":return: data path tied to the user"""
|
||||
return Path(self.user_data_dir)
|
||||
|
||||
@property
|
||||
def site_data_path(self) -> Path:
|
||||
""":return: data path shared by users"""
|
||||
return Path(self.site_data_dir)
|
||||
|
||||
@property
|
||||
def user_config_path(self) -> Path:
|
||||
""":return: config path tied to the user"""
|
||||
return Path(self.user_config_dir)
|
||||
|
||||
@property
|
||||
def site_config_path(self) -> Path:
|
||||
""":return: config path shared by the users"""
|
||||
return Path(self.site_config_dir)
|
||||
|
||||
@property
|
||||
def user_cache_path(self) -> Path:
|
||||
""":return: cache path tied to the user"""
|
||||
return Path(self.user_cache_dir)
|
||||
|
||||
@property
|
||||
def user_state_path(self) -> Path:
|
||||
""":return: state path tied to the user"""
|
||||
return Path(self.user_state_dir)
|
||||
|
||||
@property
|
||||
def user_log_path(self) -> Path:
|
||||
""":return: log path tied to the user"""
|
||||
return Path(self.user_log_dir)
|
||||
|
||||
@property
|
||||
def user_documents_path(self) -> Path:
|
||||
""":return: documents path tied to the user"""
|
||||
return Path(self.user_documents_dir)
|
||||
|
||||
@property
|
||||
def user_runtime_path(self) -> Path:
|
||||
""":return: runtime path tied to the user"""
|
||||
return Path(self.user_runtime_dir)
|
@ -1,64 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from .api import PlatformDirsABC
|
||||
|
||||
|
||||
class MacOS(PlatformDirsABC):
|
||||
"""
|
||||
Platform directories for the macOS operating system. Follows the guidance from `Apple documentation
|
||||
<https://developer.apple.com/library/archive/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/MacOSXDirectories/MacOSXDirectories.html>`_.
|
||||
Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>` and
|
||||
`version <platformdirs.api.PlatformDirsABC.version>`.
|
||||
"""
|
||||
|
||||
@property
|
||||
def user_data_dir(self) -> str:
|
||||
""":return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``"""
|
||||
return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support/"))
|
||||
|
||||
@property
|
||||
def site_data_dir(self) -> str:
|
||||
""":return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``"""
|
||||
return self._append_app_name_and_version("/Library/Application Support")
|
||||
|
||||
@property
|
||||
def user_config_dir(self) -> str:
|
||||
""":return: config directory tied to the user, e.g. ``~/Library/Preferences/$appname/$version``"""
|
||||
return self._append_app_name_and_version(os.path.expanduser("~/Library/Preferences/"))
|
||||
|
||||
@property
|
||||
def site_config_dir(self) -> str:
|
||||
""":return: config directory shared by the users, e.g. ``/Library/Preferences/$appname``"""
|
||||
return self._append_app_name_and_version("/Library/Preferences")
|
||||
|
||||
@property
|
||||
def user_cache_dir(self) -> str:
|
||||
""":return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``"""
|
||||
return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches"))
|
||||
|
||||
@property
|
||||
def user_state_dir(self) -> str:
|
||||
""":return: state directory tied to the user, same as `user_data_dir`"""
|
||||
return self.user_data_dir
|
||||
|
||||
@property
|
||||
def user_log_dir(self) -> str:
|
||||
""":return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``"""
|
||||
return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs"))
|
||||
|
||||
@property
|
||||
def user_documents_dir(self) -> str:
|
||||
""":return: documents directory tied to the user, e.g. ``~/Documents``"""
|
||||
return os.path.expanduser("~/Documents")
|
||||
|
||||
@property
|
||||
def user_runtime_dir(self) -> str:
|
||||
""":return: runtime directory tied to the user, e.g. ``~/Library/Caches/TemporaryItems/$appname/$version``"""
|
||||
return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems"))
|
||||
|
||||
|
||||
__all__ = [
|
||||
"MacOS",
|
||||
]
|
@ -1,181 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from configparser import ConfigParser
|
||||
from pathlib import Path
|
||||
|
||||
from .api import PlatformDirsABC
|
||||
|
||||
if sys.platform.startswith("linux"): # pragma: no branch # no op check, only to please the type checker
|
||||
from os import getuid
|
||||
else:
|
||||
|
||||
def getuid() -> int:
|
||||
raise RuntimeError("should only be used on Linux")
|
||||
|
||||
|
||||
class Unix(PlatformDirsABC):
|
||||
"""
|
||||
On Unix/Linux, we follow the
|
||||
`XDG Basedir Spec <https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_. The spec allows
|
||||
overriding directories with environment variables. The examples shown are the default values, alongside the name of
|
||||
the environment variable that overrides them. Makes use of the
|
||||
`appname <platformdirs.api.PlatformDirsABC.appname>`,
|
||||
`version <platformdirs.api.PlatformDirsABC.version>`,
|
||||
`multipath <platformdirs.api.PlatformDirsABC.multipath>`,
|
||||
`opinion <platformdirs.api.PlatformDirsABC.opinion>`.
|
||||
"""
|
||||
|
||||
@property
|
||||
def user_data_dir(self) -> str:
|
||||
"""
|
||||
:return: data directory tied to the user, e.g. ``~/.local/share/$appname/$version`` or
|
||||
``$XDG_DATA_HOME/$appname/$version``
|
||||
"""
|
||||
path = os.environ.get("XDG_DATA_HOME", "")
|
||||
if not path.strip():
|
||||
path = os.path.expanduser("~/.local/share")
|
||||
return self._append_app_name_and_version(path)
|
||||
|
||||
@property
|
||||
def site_data_dir(self) -> str:
|
||||
"""
|
||||
:return: data directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>` is
|
||||
enabled and ``XDG_DATA_DIRS`` is set to multiple paths, the returned value is also a multi-path string separated by the OS
|
||||
path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
|
||||
"""
|
||||
# XDG default for $XDG_DATA_DIRS; only first, if multipath is False
|
||||
path = os.environ.get("XDG_DATA_DIRS", "")
|
||||
if not path.strip():
|
||||
path = f"/usr/local/share{os.pathsep}/usr/share"
|
||||
return self._with_multi_path(path)
|
||||
|
||||
def _with_multi_path(self, path: str) -> str:
|
||||
path_list = path.split(os.pathsep)
|
||||
if not self.multipath:
|
||||
path_list = path_list[0:1]
|
||||
path_list = [self._append_app_name_and_version(os.path.expanduser(p)) for p in path_list]
|
||||
return os.pathsep.join(path_list)
|
||||
|
||||
@property
|
||||
def user_config_dir(self) -> str:
|
||||
"""
|
||||
:return: config directory tied to the user, e.g. ``~/.config/$appname/$version`` or
|
||||
``$XDG_CONFIG_HOME/$appname/$version``
|
||||
"""
|
||||
path = os.environ.get("XDG_CONFIG_HOME", "")
|
||||
if not path.strip():
|
||||
path = os.path.expanduser("~/.config")
|
||||
return self._append_app_name_and_version(path)
|
||||
|
||||
@property
|
||||
def site_config_dir(self) -> str:
|
||||
"""
|
||||
:return: config directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>`
|
||||
is enabled and ``XDG_CONFIG_DIRS`` is set to multiple paths, the returned value is also a multi-path string separated by the OS
|
||||
path separator), e.g. ``/etc/xdg/$appname/$version``
|
||||
"""
|
||||
# XDG default for $XDG_CONFIG_DIRS only first, if multipath is False
|
||||
path = os.environ.get("XDG_CONFIG_DIRS", "")
|
||||
if not path.strip():
|
||||
path = "/etc/xdg"
|
||||
return self._with_multi_path(path)
|
||||
|
||||
@property
|
||||
def user_cache_dir(self) -> str:
|
||||
"""
|
||||
:return: cache directory tied to the user, e.g. ``~/.cache/$appname/$version`` or
|
||||
``$XDG_CACHE_HOME/$appname/$version``
|
||||
"""
|
||||
path = os.environ.get("XDG_CACHE_HOME", "")
|
||||
if not path.strip():
|
||||
path = os.path.expanduser("~/.cache")
|
||||
return self._append_app_name_and_version(path)
|
||||
|
||||
@property
|
||||
def user_state_dir(self) -> str:
|
||||
"""
|
||||
:return: state directory tied to the user, e.g. ``~/.local/state/$appname/$version`` or
|
||||
``$XDG_STATE_HOME/$appname/$version``
|
||||
"""
|
||||
path = os.environ.get("XDG_STATE_HOME", "")
|
||||
if not path.strip():
|
||||
path = os.path.expanduser("~/.local/state")
|
||||
return self._append_app_name_and_version(path)
|
||||
|
||||
@property
|
||||
def user_log_dir(self) -> str:
|
||||
"""
|
||||
:return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it
|
||||
"""
|
||||
path = self.user_state_dir
|
||||
if self.opinion:
|
||||
path = os.path.join(path, "log")
|
||||
return path
|
||||
|
||||
@property
|
||||
def user_documents_dir(self) -> str:
|
||||
"""
|
||||
:return: documents directory tied to the user, e.g. ``~/Documents``
|
||||
"""
|
||||
documents_dir = _get_user_dirs_folder("XDG_DOCUMENTS_DIR")
|
||||
if documents_dir is None:
|
||||
documents_dir = os.environ.get("XDG_DOCUMENTS_DIR", "").strip()
|
||||
if not documents_dir:
|
||||
documents_dir = os.path.expanduser("~/Documents")
|
||||
|
||||
return documents_dir
|
||||
|
||||
@property
|
||||
def user_runtime_dir(self) -> str:
|
||||
"""
|
||||
:return: runtime directory tied to the user, e.g. ``/run/user/$(id -u)/$appname/$version`` or
|
||||
``$XDG_RUNTIME_DIR/$appname/$version``
|
||||
"""
|
||||
path = os.environ.get("XDG_RUNTIME_DIR", "")
|
||||
if not path.strip():
|
||||
path = f"/run/user/{getuid()}"
|
||||
return self._append_app_name_and_version(path)
|
||||
|
||||
@property
|
||||
def site_data_path(self) -> Path:
|
||||
""":return: data path shared by users. Only return first item, even if ``multipath`` is set to ``True``"""
|
||||
return self._first_item_as_path_if_multipath(self.site_data_dir)
|
||||
|
||||
@property
|
||||
def site_config_path(self) -> Path:
|
||||
""":return: config path shared by the users. Only return first item, even if ``multipath`` is set to ``True``"""
|
||||
return self._first_item_as_path_if_multipath(self.site_config_dir)
|
||||
|
||||
def _first_item_as_path_if_multipath(self, directory: str) -> Path:
|
||||
if self.multipath:
|
||||
# Even when multipath is enabled, only the first path entry is returned here.
|
||||
directory = directory.split(os.pathsep)[0]
|
||||
return Path(directory)
|
||||
|
||||
|
||||
def _get_user_dirs_folder(key: str) -> str | None:
|
||||
"""Return directory from user-dirs.dirs config file. See https://freedesktop.org/wiki/Software/xdg-user-dirs/"""
|
||||
user_dirs_config_path = os.path.join(Unix().user_config_dir, "user-dirs.dirs")
|
||||
if os.path.exists(user_dirs_config_path):
|
||||
parser = ConfigParser()
|
||||
|
||||
with open(user_dirs_config_path) as stream:
|
||||
# Add fake section header, so ConfigParser doesn't complain
|
||||
parser.read_string(f"[top]\n{stream.read()}")
|
||||
|
||||
if key not in parser["top"]:
|
||||
return None
|
||||
|
||||
path = parser["top"][key].strip('"')
|
||||
# Handle relative home paths
|
||||
path = path.replace("$HOME", os.path.expanduser("~"))
|
||||
return path
|
||||
|
||||
return None
|
||||
|
||||
|
||||
__all__ = [
|
||||
"Unix",
|
||||
]
|
@ -1,4 +0,0 @@
|
||||
# file generated by setuptools_scm
|
||||
# don't change, don't track in version control
|
||||
__version__ = version = '2.6.2'
|
||||
__version_tuple__ = version_tuple = (2, 6, 2)
|
@ -1,184 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import ctypes
|
||||
import os
|
||||
import sys
|
||||
from functools import lru_cache
|
||||
from typing import Callable
|
||||
|
||||
from .api import PlatformDirsABC
|
||||
|
||||
|
||||
class Windows(PlatformDirsABC):
|
||||
"""`MSDN on where to store app data files
|
||||
<http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120>`_.
|
||||
Makes use of the
|
||||
`appname <platformdirs.api.PlatformDirsABC.appname>`,
|
||||
`appauthor <platformdirs.api.PlatformDirsABC.appauthor>`,
|
||||
`version <platformdirs.api.PlatformDirsABC.version>`,
|
||||
`roaming <platformdirs.api.PlatformDirsABC.roaming>`,
|
||||
`opinion <platformdirs.api.PlatformDirsABC.opinion>`."""
|
||||
|
||||
@property
|
||||
def user_data_dir(self) -> str:
|
||||
"""
|
||||
:return: data directory tied to the user, e.g.
|
||||
``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname`` (not roaming) or
|
||||
``%USERPROFILE%\\AppData\\Roaming\\$appauthor\\$appname`` (roaming)
|
||||
"""
|
||||
const = "CSIDL_APPDATA" if self.roaming else "CSIDL_LOCAL_APPDATA"
|
||||
path = os.path.normpath(get_win_folder(const))
|
||||
return self._append_parts(path)
|
||||
|
||||
def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str:
|
||||
params = []
|
||||
if self.appname:
|
||||
if self.appauthor is not False:
|
||||
author = self.appauthor or self.appname
|
||||
params.append(author)
|
||||
params.append(self.appname)
|
||||
if opinion_value is not None and self.opinion:
|
||||
params.append(opinion_value)
|
||||
if self.version:
|
||||
params.append(self.version)
|
||||
return os.path.join(path, *params)
|
||||
|
||||
@property
|
||||
def site_data_dir(self) -> str:
|
||||
""":return: data directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname``"""
|
||||
path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
|
||||
return self._append_parts(path)
|
||||
|
||||
@property
|
||||
def user_config_dir(self) -> str:
|
||||
""":return: config directory tied to the user, same as `user_data_dir`"""
|
||||
return self.user_data_dir
|
||||
|
||||
@property
|
||||
def site_config_dir(self) -> str:
|
||||
""":return: config directory shared by the users, same as `site_data_dir`"""
|
||||
return self.site_data_dir
|
||||
|
||||
@property
|
||||
def user_cache_dir(self) -> str:
|
||||
"""
|
||||
:return: cache directory tied to the user (if opinionated with ``Cache`` folder within ``$appname``) e.g.
|
||||
``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname\\Cache\\$version``
|
||||
"""
|
||||
path = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA"))
|
||||
return self._append_parts(path, opinion_value="Cache")
|
||||
|
||||
@property
|
||||
def user_state_dir(self) -> str:
|
||||
""":return: state directory tied to the user, same as `user_data_dir`"""
|
||||
return self.user_data_dir
|
||||
|
||||
@property
|
||||
def user_log_dir(self) -> str:
|
||||
"""
|
||||
:return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it
|
||||
"""
|
||||
path = self.user_data_dir
|
||||
if self.opinion:
|
||||
path = os.path.join(path, "Logs")
|
||||
return path
|
||||
|
||||
@property
|
||||
def user_documents_dir(self) -> str:
|
||||
"""
|
||||
:return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``
|
||||
"""
|
||||
return os.path.normpath(get_win_folder("CSIDL_PERSONAL"))
|
||||
|
||||
@property
|
||||
def user_runtime_dir(self) -> str:
|
||||
"""
|
||||
:return: runtime directory tied to the user, e.g.
|
||||
``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname``
|
||||
"""
|
||||
path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp"))
|
||||
return self._append_parts(path)
|
||||
|
||||
|
||||
def get_win_folder_from_env_vars(csidl_name: str) -> str:
|
||||
"""Get folder from environment variables."""
|
||||
if csidl_name == "CSIDL_PERSONAL": # does not have an environment name
|
||||
return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents")
|
||||
|
||||
env_var_name = {
|
||||
"CSIDL_APPDATA": "APPDATA",
|
||||
"CSIDL_COMMON_APPDATA": "ALLUSERSPROFILE",
|
||||
"CSIDL_LOCAL_APPDATA": "LOCALAPPDATA",
|
||||
}.get(csidl_name)
|
||||
if env_var_name is None:
|
||||
raise ValueError(f"Unknown CSIDL name: {csidl_name}")
|
||||
result = os.environ.get(env_var_name)
|
||||
if result is None:
|
||||
raise ValueError(f"Unset environment variable: {env_var_name}")
|
||||
return result
|
||||
|
||||
|
||||
def get_win_folder_from_registry(csidl_name: str) -> str:
|
||||
"""Get folder from the registry.
|
||||
|
||||
This is a fallback technique at best. I'm not sure if using the
|
||||
registry for this guarantees us the correct answer for all CSIDL_*
|
||||
names.
|
||||
"""
|
||||
shell_folder_name = {
|
||||
"CSIDL_APPDATA": "AppData",
|
||||
"CSIDL_COMMON_APPDATA": "Common AppData",
|
||||
"CSIDL_LOCAL_APPDATA": "Local AppData",
|
||||
"CSIDL_PERSONAL": "Personal",
|
||||
}.get(csidl_name)
|
||||
if shell_folder_name is None:
|
||||
raise ValueError(f"Unknown CSIDL name: {csidl_name}")
|
||||
if sys.platform != "win32": # only needed for mypy type checker to know that this code runs only on Windows
|
||||
raise NotImplementedError
|
||||
import winreg
|
||||
|
||||
key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
|
||||
directory, _ = winreg.QueryValueEx(key, shell_folder_name)
|
||||
return str(directory)
|
||||
|
||||
|
||||
def get_win_folder_via_ctypes(csidl_name: str) -> str:
|
||||
"""Get folder with ctypes."""
|
||||
csidl_const = {
|
||||
"CSIDL_APPDATA": 26,
|
||||
"CSIDL_COMMON_APPDATA": 35,
|
||||
"CSIDL_LOCAL_APPDATA": 28,
|
||||
"CSIDL_PERSONAL": 5,
|
||||
}.get(csidl_name)
|
||||
if csidl_const is None:
|
||||
raise ValueError(f"Unknown CSIDL name: {csidl_name}")
|
||||
|
||||
buf = ctypes.create_unicode_buffer(1024)
|
||||
windll = getattr(ctypes, "windll") # noqa: B009 # using getattr to avoid false positive with mypy type checker
|
||||
windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
|
||||
|
||||
# Downgrade to short path name if it has highbit chars.
|
||||
if any(ord(c) > 255 for c in buf):
|
||||
buf2 = ctypes.create_unicode_buffer(1024)
|
||||
if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
|
||||
buf = buf2
|
||||
|
||||
return buf.value
|
||||
|
||||
|
||||
def _pick_get_win_folder() -> Callable[[str], str]:
|
||||
if hasattr(ctypes, "windll"):
|
||||
return get_win_folder_via_ctypes
|
||||
try:
|
||||
import winreg # noqa: F401
|
||||
except ImportError:
|
||||
return get_win_folder_from_env_vars
|
||||
else:
|
||||
return get_win_folder_from_registry
|
||||
|
||||
|
||||
get_win_folder = lru_cache(maxsize=None)(_pick_get_win_folder())
|
||||
|
||||
__all__ = [
|
||||
"Windows",
|
||||
]
|
424
third_party/python/setuptools/pkg_resources/api_tests.txt
vendored
Normal file
@ -0,0 +1,424 @@
|
||||
Pluggable Distributions of Python Software
|
||||
==========================================
|
||||
|
||||
Distributions
|
||||
-------------
|
||||
|
||||
A "Distribution" is a collection of files that represent a "Release" of a
|
||||
"Project" as of a particular point in time, denoted by a
|
||||
"Version"::
|
||||
|
||||
>>> import sys, pkg_resources
|
||||
>>> from pkg_resources import Distribution
|
||||
>>> Distribution(project_name="Foo", version="1.2")
|
||||
Foo 1.2
|
||||
|
||||
Distributions have a location, which can be a filename, URL, or really anything
|
||||
else you care to use::
|
||||
|
||||
>>> dist = Distribution(
|
||||
... location="http://example.com/something",
|
||||
... project_name="Bar", version="0.9"
|
||||
... )
|
||||
|
||||
>>> dist
|
||||
Bar 0.9 (http://example.com/something)
|
||||
|
||||
|
||||
Distributions have various introspectable attributes::
|
||||
|
||||
>>> dist.location
|
||||
'http://example.com/something'
|
||||
|
||||
>>> dist.project_name
|
||||
'Bar'
|
||||
|
||||
>>> dist.version
|
||||
'0.9'
|
||||
|
||||
>>> dist.py_version == '{}.{}'.format(*sys.version_info)
|
||||
True
|
||||
|
||||
>>> print(dist.platform)
|
||||
None
|
||||
|
||||
Including various computed attributes::
|
||||
|
||||
>>> from pkg_resources import parse_version
|
||||
>>> dist.parsed_version == parse_version(dist.version)
|
||||
True
|
||||
|
||||
>>> dist.key # case-insensitive form of the project name
|
||||
'bar'
|
||||
|
||||
Distributions are compared (and hashed) by version first::
|
||||
|
||||
>>> Distribution(version='1.0') == Distribution(version='1.0')
|
||||
True
|
||||
>>> Distribution(version='1.0') == Distribution(version='1.1')
|
||||
False
|
||||
>>> Distribution(version='1.0') < Distribution(version='1.1')
|
||||
True
|
||||
|
||||
but also by project name (case-insensitive), platform, Python version,
|
||||
location, etc.::
|
||||
|
||||
>>> Distribution(project_name="Foo",version="1.0") == \
|
||||
... Distribution(project_name="Foo",version="1.0")
|
||||
True
|
||||
|
||||
>>> Distribution(project_name="Foo",version="1.0") == \
|
||||
... Distribution(project_name="foo",version="1.0")
|
||||
True
|
||||
|
||||
>>> Distribution(project_name="Foo",version="1.0") == \
|
||||
... Distribution(project_name="Foo",version="1.1")
|
||||
False
|
||||
|
||||
>>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \
|
||||
... Distribution(project_name="Foo",py_version="2.4",version="1.0")
|
||||
False
|
||||
|
||||
>>> Distribution(location="spam",version="1.0") == \
|
||||
... Distribution(location="spam",version="1.0")
|
||||
True
|
||||
|
||||
>>> Distribution(location="spam",version="1.0") == \
|
||||
... Distribution(location="baz",version="1.0")
|
||||
False
|
||||
|
||||
|
||||
|
||||
Hash and compare distribution by prio/plat
|
||||
|
||||
Get version from metadata
|
||||
provider capabilities
|
||||
egg_name()
|
||||
as_requirement()
|
||||
from_location, from_filename (w/path normalization)
|
||||
|
||||
Releases may have zero or more "Requirements", which indicate
|
||||
what releases of another project the release requires in order to
|
||||
function. A Requirement names the other project, expresses some criteria
|
||||
as to what releases of that project are acceptable, and lists any "Extras"
|
||||
that the requiring release may need from that project. (An Extra is an
|
||||
optional feature of a Release, that can only be used if its additional
|
||||
Requirements are satisfied.)
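
As a hedged illustration (not part of the original test suite), a Requirement can
be parsed from a requirement string and inspected; the attribute names used below
(``project_name``, ``extras``, ``specs``) are assumed from the public
``pkg_resources`` API::

    >>> from pkg_resources import Requirement
    >>> req = Requirement.parse("Bar[fizz]>=0.9")
    >>> req.project_name
    'Bar'
    >>> req.extras
    ('fizz',)
    >>> req.specs
    [('>=', '0.9')]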
|
||||
|
||||
|
||||
|
||||
The Working Set
|
||||
---------------
|
||||
|
||||
A collection of active distributions is called a Working Set. Note that a
|
||||
Working Set can contain any importable distribution, not just pluggable ones.
|
||||
For example, the Python standard library is an importable distribution that
|
||||
will usually be part of the Working Set, even though it is not pluggable.
|
||||
Similarly, when you are doing development work on a project, the files you are
|
||||
editing are also a Distribution. (And, with a little attention to the
|
||||
directory names used, and including some additional metadata, such a
|
||||
"development distribution" can be made pluggable as well.)
|
||||
|
||||
>>> from pkg_resources import WorkingSet
|
||||
|
||||
A working set's entries are the sys.path entries that correspond to the active
|
||||
distributions. By default, the working set's entries are the items on
|
||||
``sys.path``::
|
||||
|
||||
>>> ws = WorkingSet()
|
||||
>>> ws.entries == sys.path
|
||||
True
|
||||
|
||||
But you can also create an empty working set explicitly, and add distributions
|
||||
to it::
|
||||
|
||||
>>> ws = WorkingSet([])
|
||||
>>> ws.add(dist)
|
||||
>>> ws.entries
|
||||
['http://example.com/something']
|
||||
>>> dist in ws
|
||||
True
|
||||
>>> Distribution('foo',version="") in ws
|
||||
False
|
||||
|
||||
And you can iterate over its distributions::
|
||||
|
||||
>>> list(ws)
|
||||
[Bar 0.9 (http://example.com/something)]
|
||||
|
||||
Adding the same distribution more than once is a no-op::
|
||||
|
||||
>>> ws.add(dist)
|
||||
>>> list(ws)
|
||||
[Bar 0.9 (http://example.com/something)]
|
||||
|
||||
For that matter, adding multiple distributions for the same project also does
|
||||
nothing, because a working set can only hold one active distribution per
|
||||
project -- the first one added to it::
|
||||
|
||||
>>> ws.add(
|
||||
... Distribution(
|
||||
... 'http://example.com/something', project_name="Bar",
|
||||
... version="7.2"
|
||||
... )
|
||||
... )
|
||||
>>> list(ws)
|
||||
[Bar 0.9 (http://example.com/something)]
|
||||
|
||||
You can append a path entry to a working set using ``add_entry()``::
|
||||
|
||||
>>> ws.entries
|
||||
['http://example.com/something']
|
||||
>>> ws.add_entry(pkg_resources.__file__)
|
||||
>>> ws.entries
|
||||
['http://example.com/something', '...pkg_resources...']
|
||||
|
||||
Multiple additions result in multiple entries, even if the entry is already in
|
||||
the working set (because ``sys.path`` can contain the same entry more than
|
||||
once)::
|
||||
|
||||
>>> ws.add_entry(pkg_resources.__file__)
|
||||
>>> ws.entries
|
||||
['...example.com...', '...pkg_resources...', '...pkg_resources...']
|
||||
|
||||
And you can specify the path entry a distribution was found under, using the
|
||||
optional second parameter to ``add()``::
|
||||
|
||||
>>> ws = WorkingSet([])
|
||||
>>> ws.add(dist,"foo")
|
||||
>>> ws.entries
|
||||
['foo']
|
||||
|
||||
But even if a distribution is found under multiple path entries, it still only
|
||||
shows up once when iterating the working set:
|
||||
|
||||
>>> ws.add_entry(ws.entries[0])
|
||||
>>> list(ws)
|
||||
[Bar 0.9 (http://example.com/something)]
|
||||
|
||||
You can ask a WorkingSet to ``find()`` a distribution matching a requirement::
|
||||
|
||||
>>> from pkg_resources import Requirement
|
||||
>>> print(ws.find(Requirement.parse("Foo==1.0"))) # no match, return None
|
||||
None
|
||||
|
||||
>>> ws.find(Requirement.parse("Bar==0.9")) # match, return distribution
|
||||
Bar 0.9 (http://example.com/something)
|
||||
|
||||
Note that asking for a conflicting version of a distribution already in a
|
||||
working set triggers a ``pkg_resources.VersionConflict`` error:
|
||||
|
||||
>>> try:
|
||||
... ws.find(Requirement.parse("Bar==1.0"))
|
||||
... except pkg_resources.VersionConflict as exc:
|
||||
... print(str(exc))
|
||||
... else:
|
||||
... raise AssertionError("VersionConflict was not raised")
|
||||
(Bar 0.9 (http://example.com/something), Requirement.parse('Bar==1.0'))
|
||||
|
||||
You can subscribe a callback function to receive notifications whenever a new
|
||||
distribution is added to a working set. The callback is immediately invoked
|
||||
once for each existing distribution in the working set, and then is called
|
||||
again for new distributions added thereafter::
|
||||
|
||||
>>> def added(dist): print("Added %s" % dist)
|
||||
>>> ws.subscribe(added)
|
||||
Added Bar 0.9
|
||||
>>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12")
|
||||
>>> ws.add(foo12)
|
||||
Added Foo 1.2
|
||||
|
||||
Note, however, that only the first distribution added for a given project name
|
||||
will trigger a callback, even during the initial ``subscribe()`` callback::
|
||||
|
||||
>>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14")
|
||||
>>> ws.add(foo14) # no callback, because Foo 1.2 is already active
|
||||
|
||||
>>> ws = WorkingSet([])
|
||||
>>> ws.add(foo12)
|
||||
>>> ws.add(foo14)
|
||||
>>> ws.subscribe(added)
|
||||
Added Foo 1.2
|
||||
|
||||
And adding a callback more than once has no effect, either::
|
||||
|
||||
>>> ws.subscribe(added) # no callbacks
|
||||
|
||||
# and no double-callbacks on subsequent additions, either
|
||||
>>> just_a_test = Distribution(project_name="JustATest", version="0.99")
|
||||
>>> ws.add(just_a_test)
|
||||
Added JustATest 0.99
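A common use of this hook is collecting every activated distribution in one
place, regardless of whether it was added before or after the subscription. A
small sketch, using a plain list as the sink::

    from pkg_resources import Distribution, WorkingSet

    activated = []
    ws = WorkingSet([])
    ws.add(Distribution("d1", project_name="Foo", version="1.2"))

    ws.subscribe(activated.append)   # invoked once per already-active project
    ws.add(Distribution("d2", project_name="JustATest", version="0.99"))

    assert [str(d) for d in activated] == ["Foo 1.2", "JustATest 0.99"]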
|
||||
|
||||
|
||||
Finding Plugins
|
||||
---------------
|
||||
|
||||
``WorkingSet`` objects can be used to figure out what plugins in an
|
||||
``Environment`` can be loaded without any resolution errors::
|
||||
|
||||
>>> from pkg_resources import Environment
|
||||
|
||||
>>> plugins = Environment([]) # normally, a list of plugin directories
|
||||
>>> plugins.add(foo12)
|
||||
>>> plugins.add(foo14)
|
||||
>>> plugins.add(just_a_test)
|
||||
|
||||
In the simplest case, we just get the newest version of each distribution in
|
||||
the plugin environment::
|
||||
|
||||
>>> ws = WorkingSet([])
|
||||
>>> ws.find_plugins(plugins)
|
||||
([JustATest 0.99, Foo 1.4 (f14)], {})
|
||||
|
||||
But if there's a problem with a version conflict or missing requirements, the
|
||||
method falls back to older versions, and the error info dict will contain an
|
||||
exception instance for each unloadable plugin::
|
||||
|
||||
>>> ws.add(foo12) # this will conflict with Foo 1.4
|
||||
>>> ws.find_plugins(plugins)
|
||||
([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)})
|
||||
|
||||
But if you disallow fallbacks, the failed plugin will be skipped instead of
|
||||
trying older versions::
|
||||
|
||||
>>> ws.find_plugins(plugins, fallback=False)
|
||||
([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)})
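Putting the pieces together, a plugin host would normally scan one or more
directories, activate whatever resolves cleanly, and report the rest. A rough
sketch against the global working set (the ``plugins/`` directory name is an
assumption)::

    from pkg_resources import Environment, working_set

    plugin_env = Environment(["plugins/"])        # scan a plugin directory
    distributions, errors = working_set.find_plugins(plugin_env)

    for dist in distributions:
        # The global working set activates each newly added distribution,
        # which puts its location on sys.path.
        working_set.add(dist)
    for dist, error in errors.items():
        print("skipping %s: %r" % (dist, error))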
|
||||
|
||||
|
||||
|
||||
Platform Compatibility Rules
|
||||
----------------------------
|
||||
|
||||
On the Mac, there are potential compatibility issues for modules compiled
|
||||
on newer versions of macOS than what the user is running. Additionally,
|
||||
macOS has had two processor architectures to contend with: Intel and PowerPC.
|
||||
|
||||
Basic equality works as on other platforms::
|
||||
|
||||
>>> from pkg_resources import compatible_platforms as cp
|
||||
>>> reqd = 'macosx-10.4-ppc'
|
||||
>>> cp(reqd, reqd)
|
||||
True
|
||||
>>> cp("win32", reqd)
|
||||
False
|
||||
|
||||
Distributions made on other machine types are not compatible::
|
||||
|
||||
>>> cp("macosx-10.4-i386", reqd)
|
||||
False
|
||||
|
||||
Distributions made on earlier versions of the OS are compatible, as
|
||||
long as they are from the same top-level version. The patchlevel version
|
||||
number does not matter::
|
||||
|
||||
>>> cp("macosx-10.4-ppc", reqd)
|
||||
True
|
||||
>>> cp("macosx-10.3-ppc", reqd)
|
||||
True
|
||||
>>> cp("macosx-10.5-ppc", reqd)
|
||||
False
|
||||
>>> cp("macosx-9.5-ppc", reqd)
|
||||
False
|
||||
|
||||
Backwards compatibility for packages made via earlier versions of
|
||||
setuptools is provided as well::
|
||||
|
||||
>>> cp("darwin-8.2.0-Power_Macintosh", reqd)
|
||||
True
|
||||
>>> cp("darwin-7.2.0-Power_Macintosh", reqd)
|
||||
True
|
||||
>>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc")
|
||||
False
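Outside the doctest, the same helper can be used to check whether a pre-built
distribution matches the interpreter it is about to be activated on. A small
sketch::

    from pkg_resources import compatible_platforms, get_build_platform

    here = get_build_platform()     # e.g. "macosx-10.4-ppc" or "linux-x86_64"
    assert compatible_platforms(here, here)

    # A distribution that declares no particular platform is always acceptable.
    assert compatible_platforms(None, here)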
|
||||
|
||||
|
||||
Environment Markers
|
||||
-------------------
|
||||
|
||||
>>> from pkg_resources import invalid_marker as im, evaluate_marker as em
|
||||
>>> import os
|
||||
|
||||
>>> print(im("sys_platform"))
|
||||
Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in
|
||||
sys_platform
|
||||
^
|
||||
|
||||
>>> print(im("sys_platform=="))
|
||||
Expected a marker variable or quoted string
|
||||
sys_platform==
|
||||
^
|
||||
|
||||
>>> print(im("sys_platform=='win32'"))
|
||||
False
|
||||
|
||||
>>> print(im("sys=='x'"))
|
||||
Expected a marker variable or quoted string
|
||||
sys=='x'
|
||||
^
|
||||
|
||||
>>> print(im("(extra)"))
|
||||
Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in
|
||||
(extra)
|
||||
^
|
||||
|
||||
>>> print(im("(extra"))
|
||||
Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in
|
||||
(extra
|
||||
^
|
||||
|
||||
>>> print(im("os.open('foo')=='y'"))
|
||||
Expected a marker variable or quoted string
|
||||
os.open('foo')=='y'
|
||||
^
|
||||
|
||||
>>> print(im("'x'=='y' and os.open('foo')=='y'")) # no short-circuit!
|
||||
Expected a marker variable or quoted string
|
||||
'x'=='y' and os.open('foo')=='y'
|
||||
^
|
||||
|
||||
>>> print(im("'x'=='x' or os.open('foo')=='y'")) # no short-circuit!
|
||||
Expected a marker variable or quoted string
|
||||
'x'=='x' or os.open('foo')=='y'
|
||||
^
|
||||
|
||||
>>> print(im("r'x'=='x'"))
|
||||
Expected a marker variable or quoted string
|
||||
r'x'=='x'
|
||||
^
|
||||
|
||||
>>> print(im("'''x'''=='x'"))
|
||||
Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in
|
||||
'''x'''=='x'
|
||||
^
|
||||
|
||||
>>> print(im('"""x"""=="x"'))
|
||||
Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in
|
||||
"""x"""=="x"
|
||||
^
|
||||
|
||||
>>> print(im(r"x\n=='x'"))
|
||||
Expected a marker variable or quoted string
|
||||
x\n=='x'
|
||||
^
|
||||
|
||||
>>> print(im("os.open=='y'"))
|
||||
Expected a marker variable or quoted string
|
||||
os.open=='y'
|
||||
^
|
||||
|
||||
>>> em("sys_platform=='win32'") == (sys.platform=='win32')
|
||||
True
|
||||
|
||||
>>> em("python_version >= '2.7'")
|
||||
True
|
||||
|
||||
>>> em("python_version > '2.6'")
|
||||
True
|
||||
|
||||
>>> im("implementation_name=='cpython'")
|
||||
False
|
||||
|
||||
>>> im("platform_python_implementation=='CPython'")
|
||||
False
|
||||
|
||||
>>> im("implementation_version=='3.5.1'")
|
||||
False
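In practice these two helpers are used together: validate a marker string
first, then evaluate it against the running interpreter. A minimal sketch
(the marker text is just an example)::

    from pkg_resources import evaluate_marker, invalid_marker

    marker = "python_version >= '3.8' and sys_platform != 'win32'"

    problem = invalid_marker(marker)   # False, or a SyntaxError describing the issue
    if problem:
        raise problem
    if evaluate_marker(marker):
        print("dependency applies to this interpreter")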
|
@ -1,89 +0,0 @@
|
||||
import importlib.util
|
||||
import sys
|
||||
|
||||
|
||||
class VendorImporter:
|
||||
"""
|
||||
A PEP 302 meta path importer for finding optionally-vendored
|
||||
or otherwise naturally-installed packages from root_name.
|
||||
"""
|
||||
|
||||
def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
|
||||
self.root_name = root_name
|
||||
self.vendored_names = set(vendored_names)
|
||||
self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')
|
||||
|
||||
@property
|
||||
def search_path(self):
|
||||
"""
|
||||
Search first the vendor package then as a natural package.
|
||||
"""
|
||||
yield self.vendor_pkg + '.'
|
||||
yield ''
|
||||
|
||||
def _module_matches_namespace(self, fullname):
|
||||
"""Figure out if the target module is vendored."""
|
||||
root, base, target = fullname.partition(self.root_name + '.')
|
||||
return not root and any(map(target.startswith, self.vendored_names))
|
||||
|
||||
def load_module(self, fullname):
|
||||
"""
|
||||
Iterate over the search path to locate and load fullname.
|
||||
"""
|
||||
root, base, target = fullname.partition(self.root_name + '.')
|
||||
for prefix in self.search_path:
|
||||
try:
|
||||
extant = prefix + target
|
||||
__import__(extant)
|
||||
mod = sys.modules[extant]
|
||||
sys.modules[fullname] = mod
|
||||
return mod
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
raise ImportError(
|
||||
"The '{target}' package is required; "
|
||||
"normally this is bundled with this package so if you get "
|
||||
"this warning, consult the packager of your "
|
||||
"distribution.".format(**locals())
|
||||
)
|
||||
|
||||
def create_module(self, spec):
|
||||
return self.load_module(spec.name)
|
||||
|
||||
def exec_module(self, module):
|
||||
pass
|
||||
|
||||
def find_spec(self, fullname, path=None, target=None):
|
||||
"""Return a module spec for vendored names."""
|
||||
return (
|
||||
importlib.util.spec_from_loader(fullname, self)
|
||||
if self._module_matches_namespace(fullname)
|
||||
else None
|
||||
)
|
||||
|
||||
def install(self):
|
||||
"""
|
||||
Install this importer into sys.meta_path if not already present.
|
||||
"""
|
||||
if self not in sys.meta_path:
|
||||
sys.meta_path.append(self)
|
||||
|
||||
|
||||
# [[[cog
|
||||
# import cog
|
||||
# from tools.vendored import yield_top_level
|
||||
# names = "\n".join(f" {x!r}," for x in yield_top_level('pkg_resources'))
|
||||
# cog.outl(f"names = (\n{names}\n)")
|
||||
# ]]]
|
||||
names = (
|
||||
'backports',
|
||||
'importlib_resources',
|
||||
'jaraco',
|
||||
'more_itertools',
|
||||
'packaging',
|
||||
'platformdirs',
|
||||
'zipp',
|
||||
)
|
||||
# [[[end]]]
|
||||
VendorImporter(__name__, names).install()
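For context, the module removed above is the ``pkg_resources.extern`` shim
used by older setuptools releases: it redirects imports of the names listed
in ``names`` to the vendored copies under ``pkg_resources._vendor``, falling
back to a normally installed package. Under those older releases the effect
looked roughly like this (illustrative only)::

    import sys

    # Importing through the shim...
    from pkg_resources.extern import packaging

    # ...actually loads either pkg_resources._vendor.packaging or a top-level
    # "packaging" install, and registers it under the extern name as well.
    assert "pkg_resources.extern.packaging" in sys.modules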
|
@ -1,265 +0,0 @@
|
||||
distutils-precedence.pth,sha256=JjjOniUA5XKl4N5_rtZmHrVp0baW_LoHsN0iPaX10iQ,151
|
||||
_distutils_hack/__init__.py,sha256=S_WwvI-K985wlulvcWfPT3dvLMCYP1yPh2yngLOmd4E,6002
|
||||
_distutils_hack/override.py,sha256=Eu_s-NF6VIZ4Cqd0tbbA5wtWky2IZPNd8et6GLt1mzo,44
|
||||
pkg_resources/__init__.py,sha256=g6EKB_MfCpW243xn2wIZjaMEW3qFC_CxMamcuYpWqqQ,111421
|
||||
pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pkg_resources/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
|
||||
pkg_resources/_vendor/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pkg_resources/_vendor/backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920
|
||||
pkg_resources/_vendor/importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506
|
||||
pkg_resources/_vendor/importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504
|
||||
pkg_resources/_vendor/importlib_resources/_common.py,sha256=jSC4xfLdcMNbtbWHtpzbFkNa0W7kvf__nsYn14C_AEU,5457
|
||||
pkg_resources/_vendor/importlib_resources/_compat.py,sha256=L8HTWyAC_MIKuxWZuw0zvTq5qmUA0ttrvK941OzDKU8,2925
|
||||
pkg_resources/_vendor/importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884
|
||||
pkg_resources/_vendor/importlib_resources/_legacy.py,sha256=0TKdZixxLWA-xwtAZw4HcpqJmj4Xprx1Zkcty0gTRZY,3481
|
||||
pkg_resources/_vendor/importlib_resources/abc.py,sha256=Icr2IJ2QtH7vvAB9vC5WRJ9KBoaDyJa7KUs8McuROzo,5140
|
||||
pkg_resources/_vendor/importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pkg_resources/_vendor/importlib_resources/readers.py,sha256=PZsi5qacr2Qn3KHw4qw3Gm1MzrBblPHoTdjqjH7EKWw,3581
|
||||
pkg_resources/_vendor/importlib_resources/simple.py,sha256=0__2TQBTQoqkajYmNPt1HxERcReAT6boVKJA328pr04,2576
|
||||
pkg_resources/_vendor/jaraco/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pkg_resources/_vendor/jaraco/context.py,sha256=kScHctkohFywkhG-BhQXPo6dRH0Wj12FxxatDc_a5SY,9573
|
||||
pkg_resources/_vendor/jaraco/functools/__init__.py,sha256=wX5n1bAtZGFfApXarzPRqVAcNoOVzGVg9SLKyK8yYZA,16705
|
||||
pkg_resources/_vendor/jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982
|
||||
pkg_resources/_vendor/jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pkg_resources/_vendor/jaraco/text/__init__.py,sha256=cN55bFcceW4wTHG5ruv5IuEDRarP-4hBYX8zl94_c30,15526
|
||||
pkg_resources/_vendor/more_itertools/__init__.py,sha256=VodgFyRJvpnHbAMgseYRiP7r928FFOAakmQrl6J88os,149
|
||||
pkg_resources/_vendor/more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
|
||||
pkg_resources/_vendor/more_itertools/more.py,sha256=Z4jylUFbhmn4gSZNFU2P0BgjzD_6uOcV8dfEmY3JZzY,143053
|
||||
pkg_resources/_vendor/more_itertools/more.pyi,sha256=KTHYeqr0rFbn1GWRnv0jY64JRNnKKT0kA3kmsah8DYQ,21044
|
||||
pkg_resources/_vendor/more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pkg_resources/_vendor/more_itertools/recipes.py,sha256=Rb3OhzJTCn2biutDEUSImbuY-8NDS1lkHt0My-uCOf4,27548
|
||||
pkg_resources/_vendor/more_itertools/recipes.pyi,sha256=T1IuEVXCqw2NeJJNW036MtWi8BVfR8Ilpf7cBmvhBaQ,4436
|
||||
pkg_resources/_vendor/packaging/__init__.py,sha256=UzotcV07p8vcJzd80S-W0srhgY8NMVD_XvJcZ7JN-tA,496
|
||||
pkg_resources/_vendor/packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
|
||||
pkg_resources/_vendor/packaging/_manylinux.py,sha256=1ng_TqyH49hY6s3W_zVHyoJIaogbJqbIF1jJ0fAehc4,9590
|
||||
pkg_resources/_vendor/packaging/_musllinux.py,sha256=kgmBGLFybpy8609-KTvzmt2zChCPWYvhp5BWP4JX7dE,2676
|
||||
pkg_resources/_vendor/packaging/_parser.py,sha256=zlsFB1FpMRjkUdQb6WLq7xON52ruQadxFpYsDXWhLb4,10347
|
||||
pkg_resources/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
|
||||
pkg_resources/_vendor/packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
|
||||
pkg_resources/_vendor/packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208
|
||||
pkg_resources/_vendor/packaging/metadata.py,sha256=w7jPEg6mDf1FTZMn79aFxFuk4SKtynUJtxr2InTxlV4,33036
|
||||
pkg_resources/_vendor/packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pkg_resources/_vendor/packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933
|
||||
pkg_resources/_vendor/packaging/specifiers.py,sha256=dB2DwbmvSbEuVilEyiIQ382YfW5JfwzXTfRRPVtaENY,39784
|
||||
pkg_resources/_vendor/packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950
|
||||
pkg_resources/_vendor/packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268
|
||||
pkg_resources/_vendor/packaging/version.py,sha256=XjRBLNK17UMDgLeP8UHnqwiY3TdSi03xFQURtec211A,16236
|
||||
pkg_resources/_vendor/platformdirs/__init__.py,sha256=edi2JSKpLCapqir0AW_CjpHtinRE3hf6aDk5-VHggLk,12806
|
||||
pkg_resources/_vendor/platformdirs/__main__.py,sha256=VsC0t5m-6f0YVr96PVks93G3EDF8MSNY4KpUMvPahDA,1164
|
||||
pkg_resources/_vendor/platformdirs/android.py,sha256=GKizhyS7ESRiU67u8UnBJLm46goau9937EchXWbPBlk,4068
|
||||
pkg_resources/_vendor/platformdirs/api.py,sha256=MXKHXOL3eh_-trSok-JUTjAR_zjmmKF3rjREVABjP8s,4910
|
||||
pkg_resources/_vendor/platformdirs/macos.py,sha256=-3UXQewbT0yMhMdkzRXfXGAntmLIH7Qt4a9Hlf8I5_Y,2655
|
||||
pkg_resources/_vendor/platformdirs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pkg_resources/_vendor/platformdirs/unix.py,sha256=P-WQjSSieE38DXjMDa1t4XHnKJQ5idEaKT0PyXwm8KQ,6911
|
||||
pkg_resources/_vendor/platformdirs/version.py,sha256=qaN-fw_htIgKUVXoAuAEVgKxQu3tZ9qE2eiKkWIS7LA,160
|
||||
pkg_resources/_vendor/platformdirs/windows.py,sha256=LOrXLgI0CjQldDo2zhOZYGYZ6g4e_cJOCB_pF9aMRWQ,6596
|
||||
pkg_resources/extern/__init__.py,sha256=GHPTDF4xYRr_wUMyabbXL0ixbzwAOikvnwTXChEhqTE,2670
|
||||
setuptools/__init__.py,sha256=ntNUdBIKpj4fQhVTWb3TUdQyC0T7iGsd9lwBtjca97I,8950
|
||||
setuptools/_core_metadata.py,sha256=9xBPVtBImw375XietaDA6TWy2s8br4URR1vrDbXDopE,9206
|
||||
setuptools/_entry_points.py,sha256=WQUzJmmu_VwiGeSFRRPYL7Dmf0nXjtbv_qWp397Romo,2333
|
||||
setuptools/_imp.py,sha256=as9Lnjl7cZHSD2SE1ShiSOK9kP7oy0MBps_MD832sFw,2443
|
||||
setuptools/_importlib.py,sha256=ZWlYbGHjb-QwRpH3SQ9uuxn_X-F2ihcQCS5HtT_W9lk,1468
|
||||
setuptools/_itertools.py,sha256=pZAgXNz6tRPUFnHAaKJ90xAgD0gLPemcE1396Zgz73o,675
|
||||
setuptools/_normalization.py,sha256=tHGXlQIPTDkp6Pxe4aKjRFwG0KOjS4httfHM2hzkLEQ,4567
|
||||
setuptools/_path.py,sha256=9tt6GFnNgFPDMIbupEYk8EiOX9k2Wj9hpdkK6kb1HeY,1178
|
||||
setuptools/_reqs.py,sha256=hUjP8Mh6D6BbLEhlFa0zksdLnLF2tMxgFPuQSJApEd4,1112
|
||||
setuptools/archive_util.py,sha256=lRK7l7GkpLJeNqnESJWUfDre4q4wR9x6Z8WD3cIagXc,7331
|
||||
setuptools/build_meta.py,sha256=6QW_PnprxN7b5DwtTb5odieDkHUbfVNLcsYeDgdXqMs,18734
|
||||
setuptools/cli-32.exe,sha256=MqzBvFQxFsviz_EMuGd3LfLyVP8mNMhwrvC0bEtpb9s,11776
|
||||
setuptools/cli-64.exe,sha256=u7PeVwdinmpgoMI4zUd7KPB_AGaYL9qVP6b87DkHOko,14336
|
||||
setuptools/cli-arm64.exe,sha256=uafQjaiA36yLz1SOuksG-1m28JsX0zFIoPZhgyiSbGE,13824
|
||||
setuptools/cli.exe,sha256=MqzBvFQxFsviz_EMuGd3LfLyVP8mNMhwrvC0bEtpb9s,11776
|
||||
setuptools/depends.py,sha256=fty6ArbOrU0QJxPIneqYNIvXB1nGL1NajjxcvWPztqQ,5599
|
||||
setuptools/discovery.py,sha256=_Wm2FkHoH5a8uF_EBauQF_MqtGxEKzWLEe0G7plyOrU,21152
|
||||
setuptools/dist.py,sha256=3GlwGGDTz6wAUJdjFgAZWxLtti7HMSpNgeZ0QLWV044,37613
|
||||
setuptools/errors.py,sha256=FS-3MTIzgv7ciswOrK71KMuEUcYh9kkWUYXd895Gbew,2669
|
||||
setuptools/extension.py,sha256=xabD6ozEdZy3H4O-TLO6fP4WW-RTDMZ76WTSbdmiIVQ,5807
|
||||
setuptools/glob.py,sha256=BYhyV7s5INjv-JMsz4SNuQWGTzY4lZVLD5_euTYsFxc,4852
|
||||
setuptools/gui-32.exe,sha256=hdrh6V13hF8stZvKw9Sv50u-TJGpvMW_SnHNQxBNvnw,11776
|
||||
setuptools/gui-64.exe,sha256=NHG2FA6txkEid9u-_j_vjDRaDxpZd2CGuAo2GMOoPjs,14336
|
||||
setuptools/gui-arm64.exe,sha256=5pT0dDQFyLWSb_RX22_n8aEt7HwWqcOGR4TT9OB64Jc,13824
|
||||
setuptools/gui.exe,sha256=hdrh6V13hF8stZvKw9Sv50u-TJGpvMW_SnHNQxBNvnw,11776
|
||||
setuptools/installer.py,sha256=2U2xHSbylwS8lxyy6PvLpSJGmKaiDzN3SjTB-e0oL4I,4969
|
||||
setuptools/launch.py,sha256=TyPT-Ic1T2EnYvGO26gfNRP4ysBlrhpbRjQxWsiO414,812
|
||||
setuptools/logging.py,sha256=JA7DVtLlC3gskysgtORtm9-4UWh9kWr9FjbXbdQsIRo,1239
|
||||
setuptools/modified.py,sha256=lDbr7ds0ZpxYN8i8b4DmwuGJhDED5QmvQKEd49gZbAY,190
|
||||
setuptools/monkey.py,sha256=WR35s98H6ICfmJidgJWhyuIRY1MLnC6iYrLK2mQV6i8,4303
|
||||
setuptools/msvc.py,sha256=5OeUPPglRPWcCQZvJ66cLOVuweWt6FA9N83B5Izb2l0,47476
|
||||
setuptools/namespaces.py,sha256=hlrf8REOKRK9XttpNbfovk4WwGQLB7KEOtfvEeHC2Ys,3131
|
||||
setuptools/package_index.py,sha256=xQL6uxDpcAQHDorgI11VvF-0w0XztrCVDuUaRqJzgzs,39108
|
||||
setuptools/sandbox.py,sha256=FBuu_kh89fOmJTEmtSuTQ68FWJ_F3bJeWQIPCg_Ae0Q,14731
|
||||
setuptools/script (dev).tmpl,sha256=RUzQzCQUaXtwdLtYHWYbIQmOaES5Brqq1FvUA_tu-5I,218
|
||||
setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138
|
||||
setuptools/unicode_utils.py,sha256=LlYPwQMxTjLr_YTbMrd1qSADh9gGH7DUiwJBIBGQ7GY,3181
|
||||
setuptools/version.py,sha256=WJCeUuyq74Aok2TeK9-OexZOu8XrlQy7-y0BEuWNovQ,161
|
||||
setuptools/warnings.py,sha256=e-R_k8T3HYIC2DScA4nzjcwigsXF8rn2lCsp3KUrYAo,3697
|
||||
setuptools/wheel.py,sha256=D3BWIyF2yDhKNu14ZtzDUXEcdK6l8tJyHj9M_sR58mA,8682
|
||||
setuptools/windows_support.py,sha256=0qLEFTYEBly5quTV1EciqKwrvS4ZtU4oHWP0Z3-BOYI,720
|
||||
setuptools/_distutils/__init__.py,sha256=xGYuhWwLG07J0Q49BVnEjPy6wyDcd6veJMDJX7ljlyM,359
|
||||
setuptools/_distutils/_collections.py,sha256=d7LOPjTlBpUHK7Wrkqo1Trjeo5sJ-ONRKpWIzfUzS0I,5440
|
||||
setuptools/_distutils/_functools.py,sha256=X0hb3XXNlzU0KNmF1oUO6rMpeAi12wxD5tSJZiXd3Zc,1771
|
||||
setuptools/_distutils/_itertools.py,sha256=dhBUItrpJTFwXeC-RdfkU_YP8JXRoUl6VluRV2op8kY,1453
|
||||
setuptools/_distutils/_log.py,sha256=i-lNTTcXS8TmWITJ6DODGvtW5z5tMattJQ76h8rZxQU,42
|
||||
setuptools/_distutils/_macos_compat.py,sha256=JzUGhF4E5yIITHbUaPobZEWjGHdrrcNV63z86S4RjBc,239
|
||||
setuptools/_distutils/_modified.py,sha256=jaEGgcY7bkqk3olA5cDI9O16usBAz80dajN5oUdkcAo,2411
|
||||
setuptools/_distutils/_msvccompiler.py,sha256=mbj-InpEOUD9R3TT8OPcN-s19nVc7SxNoeq8c0cpxfY,19613
|
||||
setuptools/_distutils/archive_util.py,sha256=hNweEX0yRwSBvKo1JyKOfqPyQENo6o98hH99jeXGbKw,8572
|
||||
setuptools/_distutils/bcppcompiler.py,sha256=ciMNFUbvcolFU7D5eC1FhHgZ4G4-9bhZZ3oD6bhAZq4,14662
|
||||
setuptools/_distutils/ccompiler.py,sha256=kYb8w9ZjqTpgsXQEfoRC9tWZFh8D0YwkQoGSHYkUbo4,48645
|
||||
setuptools/_distutils/cmd.py,sha256=4z-q1CBxUeE3cSsdT0qvIiqmRbyK2VDavWio96n8W3g,17801
|
||||
setuptools/_distutils/config.py,sha256=cCdL7BAavdh_2tDW0EZp1ro1vTeWhFZzLXN1P6HAl6U,5226
|
||||
setuptools/_distutils/core.py,sha256=WPZrrf2mbM9VPg4o_jjARPVgLzQNYd_atMTfNr2P1vI,9372
|
||||
setuptools/_distutils/cygwinccompiler.py,sha256=SBP-3cAMXplgToYmdeYh6riK3JMgrZPNa9dqOqKCiZ0,11945
|
||||
setuptools/_distutils/debug.py,sha256=N6MrTAqK6l9SVk6tWweR108PM8Ol7qNlfyV-nHcLhsY,139
|
||||
setuptools/_distutils/dep_util.py,sha256=xN75p6ZpHhMiHEc-rpL2XilJQynHnDNiafHteaZ4tjU,349
|
||||
setuptools/_distutils/dir_util.py,sha256=aO95MLDq2_Bw7PIheB-IeoYF2xc9yfo-CMlJg2OlwQY,7965
|
||||
setuptools/_distutils/dist.py,sha256=RGSfGMKDrILGwuylDbx7MLr-c298IzyCJKzwO2y1tPY,50116
|
||||
setuptools/_distutils/errors.py,sha256=ZtBwnhDpQA2bxIazPXNDQ25uNxM4p2omsaSRNpV3rpE,3589
|
||||
setuptools/_distutils/extension.py,sha256=qwg1CNJgN2Gq7bFFfQz2Jpb8LfL-Ec2FTli8Q5qhaD4,10197
|
||||
setuptools/_distutils/fancy_getopt.py,sha256=VxwHHtyJi8Qp8ZwKuJhzxSkX9xwqCqqAxq4d8VbkAWY,17831
|
||||
setuptools/_distutils/file_util.py,sha256=3A_9gykBBHFn0Qf8EzkJYaqYzmQb8-kqlHQhsakACvk,7926
|
||||
setuptools/_distutils/filelist.py,sha256=8EQ8bV9x3XAW8da6CIyMH5_qo2Ff0FJFHyVpiPYFWMM,13635
|
||||
setuptools/_distutils/log.py,sha256=VyBs5j7z4-K6XTEEBThUc9HyMpoPLGtQpERqbz5ylww,1200
|
||||
setuptools/_distutils/msvc9compiler.py,sha256=-YXqSr2-OTygm6ScHmKRrI_gLZlUZKEoWHsucNBteL0,30108
|
||||
setuptools/_distutils/msvccompiler.py,sha256=K4SnI248hv37lw8QujtHKtEXbTdWdJwQ5IzarCQW8Xk,23443
|
||||
setuptools/_distutils/py38compat.py,sha256=ip0qdoFG3-1dJ5SsLgSaZpycOjpg5Y_on7btsPDS8kw,205
|
||||
setuptools/_distutils/py39compat.py,sha256=hOsD6lwZLqZoMnacNJ3P6nUA-LJQhEpVtYTzVH0o96M,1964
|
||||
setuptools/_distutils/spawn.py,sha256=9LUwbJsZcwVrMx78ynZIeOFfeGTV41-bf9GgsEguR04,3431
|
||||
setuptools/_distutils/sysconfig.py,sha256=sDrPTFl-QzVlneWA4SaAf_AeTIZE-pytWnxGfRHqBIM,18485
|
||||
setuptools/_distutils/text_file.py,sha256=MSzP1kmjFZOwnrdQsqwG3ZWNa2wQ4OXK75J3SWOX05g,12100
|
||||
setuptools/_distutils/unixccompiler.py,sha256=pgmWd50imVfEshJuy-fNWqcRXDft_C8_UErW68c_n2I,15572
|
||||
setuptools/_distutils/util.py,sha256=HwvslM2iEnIvETbPf6NEc1RCxoYfRH-Pfu5KXzCf5IE,18120
|
||||
setuptools/_distutils/version.py,sha256=tSFYhuKjxom5kONNB3gOTNf52bORqzmUN64LxFT5QoU,12648
|
||||
setuptools/_distutils/versionpredicate.py,sha256=ub3qV0ImwzVR-Ie-s9OtN6QQ-HvDUPwhfsiJWHPAU7U,5205
|
||||
setuptools/_distutils/zosccompiler.py,sha256=mOyTYXmIwq5fE61etbj9QmD2uu3iPDftBKp9c1OauW0,6573
|
||||
setuptools/_distutils/command/__init__.py,sha256=fVUps4DJhvShMAod0y7xl02m46bd7r31irEhNofPrrs,430
|
||||
setuptools/_distutils/command/_framework_compat.py,sha256=0iZdSJYzGRWCCvzRDKE-R0-_yaAYvFMd1ylXb2eYXug,1609
|
||||
setuptools/_distutils/command/bdist.py,sha256=1RJ0crzG6rfzT-rhUE6AERR_2x5ukhCrUQJpwalBKA4,5353
|
||||
setuptools/_distutils/command/bdist_dumb.py,sha256=CUHqSIXmRJjGrf-YcHtGIan7yqmjoDakFmww74L1vYw,4596
|
||||
setuptools/_distutils/command/bdist_rpm.py,sha256=VzZgWkpSz9Imu4eeQvvZWgsLSrr8h7arRjyZEZgO6Aw,21717
|
||||
setuptools/_distutils/command/build.py,sha256=etXMnBBx1ugT3knALXT3xH1kHvxE-Dkmp2okjxnWpuE,5575
|
||||
setuptools/_distutils/command/build_clib.py,sha256=KY94Cv6dpWyOjXLq1wiIpiGgkM_JajbQllm_voL0hNs,7686
|
||||
setuptools/_distutils/command/build_ext.py,sha256=ObN29w7LCc9UpxJmwFj6VAZgYYNit5BsvVs3qRpEQ-M,31793
|
||||
setuptools/_distutils/command/build_py.py,sha256=fI2fSFNlhQSi0nMKiTCJYWF6VEKx8aTviP3vzzipDJk,16539
|
||||
setuptools/_distutils/command/build_scripts.py,sha256=CYvH30nPYs0hFUIBsSXi4zsW5ZaZxZVaWodJdjzWJDo,5536
|
||||
setuptools/_distutils/command/check.py,sha256=eeoq60XhOfl7H8fy0aq0H6TCyZvL-JhnXtXAnsYSQgQ,4912
|
||||
setuptools/_distutils/command/clean.py,sha256=RFdfEwNlJZ-Sw_UPPYrcxqa0WIX9NHQE8pveS4BcS8A,2595
|
||||
setuptools/_distutils/command/config.py,sha256=Bhkk4KgD95HV7viLDPQqJF6kYCFbMle-2qZBTTWlPq0,13007
|
||||
setuptools/_distutils/command/install.py,sha256=FUQzZL_9dj8GuujA4l34jZiQHnew-p7Btfe6FTc6OhM,30128
|
||||
setuptools/_distutils/command/install_data.py,sha256=gl2h3cPwN4h388BKDazTtNt0J8KP9SXbbgB3MNVLLXg,2757
|
||||
setuptools/_distutils/command/install_egg_info.py,sha256=Sy2fuBqvCGlQVhJcUteGbdG0KCx2BBN7jAzWEMA9rp8,2788
|
||||
setuptools/_distutils/command/install_headers.py,sha256=v-QcVkjaWX5yf0xaup9_KySanVlmd6LhuzEhGpmTiTU,1180
|
||||
setuptools/_distutils/command/install_lib.py,sha256=x_6ULzcpTYXNEMrKe25h74tKlsaMe7exu2UCHL5tlok,8408
|
||||
setuptools/_distutils/command/install_scripts.py,sha256=hCYKJ2GzfuZK7RgSNDcVI53c1o4U7rHDFDB96N71Z0E,1933
|
||||
setuptools/_distutils/command/register.py,sha256=GIbedaPEuVJasaJ6LO3i9PJ1tkjQAWKFn20o9JznazQ,11805
|
||||
setuptools/_distutils/command/sdist.py,sha256=vDwW5w9ev4znK0ayOKYGysSzHWLhLz6_jaVKWdzHGEc,19188
|
||||
setuptools/_distutils/command/upload.py,sha256=65zhvH62G7Q3MdO5IXtMWtFNLNSYVengkh_LNDGiBng,7497
|
||||
setuptools/_distutils/compat/__init__.py,sha256=fdLzjO_CkfzBFwOJX9XIfXbBkeYKzMiVABd5T4dLZOU,417
|
||||
setuptools/_distutils/compat/py38.py,sha256=2hge9_F3LivW4R4ptFqsG5sb6WrsQc9V30lwMWv86hg,567
|
||||
setuptools/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/ordered_set.py,sha256=dbaCcs27dyN9gnMWGF5nA_BrVn6Q-NrjKYJpV9_fgBs,15130
|
||||
setuptools/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
|
||||
setuptools/_vendor/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920
|
||||
setuptools/_vendor/importlib_metadata/__init__.py,sha256=fQEsJb7Gs_9Vq9V0xHICB0EFxNRGyxubr4w4ZFmGcxY,26498
|
||||
setuptools/_vendor/importlib_metadata/_adapters.py,sha256=i8S6Ib1OQjcILA-l4gkzktMZe18TaeUNI49PLRp6OBU,2454
|
||||
setuptools/_vendor/importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743
|
||||
setuptools/_vendor/importlib_metadata/_compat.py,sha256=GtdqmFy_ykVSTkz6MdGL2g3V5kxvQKHTWxKZCk5Q59Q,1859
|
||||
setuptools/_vendor/importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895
|
||||
setuptools/_vendor/importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068
|
||||
setuptools/_vendor/importlib_metadata/_meta.py,sha256=v5e1ZDG7yZTH3h7TjbS5bM5p8AGzMPVOu8skDMv4h6k,1165
|
||||
setuptools/_vendor/importlib_metadata/_py39compat.py,sha256=2Tk5twb_VgLCY-1NEAQjdZp_S9OFMC-pUzP2isuaPsQ,1098
|
||||
setuptools/_vendor/importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166
|
||||
setuptools/_vendor/importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506
|
||||
setuptools/_vendor/importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504
|
||||
setuptools/_vendor/importlib_resources/_common.py,sha256=jSC4xfLdcMNbtbWHtpzbFkNa0W7kvf__nsYn14C_AEU,5457
|
||||
setuptools/_vendor/importlib_resources/_compat.py,sha256=L8HTWyAC_MIKuxWZuw0zvTq5qmUA0ttrvK941OzDKU8,2925
|
||||
setuptools/_vendor/importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884
|
||||
setuptools/_vendor/importlib_resources/_legacy.py,sha256=0TKdZixxLWA-xwtAZw4HcpqJmj4Xprx1Zkcty0gTRZY,3481
|
||||
setuptools/_vendor/importlib_resources/abc.py,sha256=Icr2IJ2QtH7vvAB9vC5WRJ9KBoaDyJa7KUs8McuROzo,5140
|
||||
setuptools/_vendor/importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_resources/readers.py,sha256=PZsi5qacr2Qn3KHw4qw3Gm1MzrBblPHoTdjqjH7EKWw,3581
|
||||
setuptools/_vendor/importlib_resources/simple.py,sha256=0__2TQBTQoqkajYmNPt1HxERcReAT6boVKJA328pr04,2576
|
||||
setuptools/_vendor/jaraco/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/jaraco/context.py,sha256=cSeYAAUQS_mPUABUWvk-FRHw1nWcDYEgd6SysLxvHTw,9570
|
||||
setuptools/_vendor/jaraco/functools/__init__.py,sha256=EMUExgjM40npn-5G6NgcUI5Mmc74WyYPavjtmxRATH0,16696
|
||||
setuptools/_vendor/jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982
|
||||
setuptools/_vendor/jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/jaraco/text/__init__.py,sha256=KfFGMerrkN_0V0rgtJVx-9dHt3tW7i_uJypjwEcLtC0,15517
|
||||
setuptools/_vendor/more_itertools/__init__.py,sha256=C7sXffHTXM3P-iaLPPfqfmDoxOflQMJLcM7ed9p3jak,82
|
||||
setuptools/_vendor/more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
|
||||
setuptools/_vendor/more_itertools/more.py,sha256=0rB_mibFR51sq33UlAI_bWfaNdsYNnJr1v6S0CaW7QA,117959
|
||||
setuptools/_vendor/more_itertools/more.pyi,sha256=r32pH2raBC1zih3evK4fyvAXvrUamJqc6dgV7QCRL_M,14977
|
||||
setuptools/_vendor/more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/more_itertools/recipes.py,sha256=UkNkrsZyqiwgLHANBTmvMhCvaNSvSNYhyOpz_Jc55DY,16256
|
||||
setuptools/_vendor/more_itertools/recipes.pyi,sha256=9BpeKd5_qalYVSnuHfqPSCfoGgqnQY2Xu9pNwrDlHU8,3551
|
||||
setuptools/_vendor/packaging/__init__.py,sha256=UzotcV07p8vcJzd80S-W0srhgY8NMVD_XvJcZ7JN-tA,496
|
||||
setuptools/_vendor/packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
|
||||
setuptools/_vendor/packaging/_manylinux.py,sha256=1ng_TqyH49hY6s3W_zVHyoJIaogbJqbIF1jJ0fAehc4,9590
|
||||
setuptools/_vendor/packaging/_musllinux.py,sha256=kgmBGLFybpy8609-KTvzmt2zChCPWYvhp5BWP4JX7dE,2676
|
||||
setuptools/_vendor/packaging/_parser.py,sha256=zlsFB1FpMRjkUdQb6WLq7xON52ruQadxFpYsDXWhLb4,10347
|
||||
setuptools/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
|
||||
setuptools/_vendor/packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
|
||||
setuptools/_vendor/packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208
|
||||
setuptools/_vendor/packaging/metadata.py,sha256=w7jPEg6mDf1FTZMn79aFxFuk4SKtynUJtxr2InTxlV4,33036
|
||||
setuptools/_vendor/packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933
|
||||
setuptools/_vendor/packaging/specifiers.py,sha256=dB2DwbmvSbEuVilEyiIQ382YfW5JfwzXTfRRPVtaENY,39784
|
||||
setuptools/_vendor/packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950
|
||||
setuptools/_vendor/packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268
|
||||
setuptools/_vendor/packaging/version.py,sha256=XjRBLNK17UMDgLeP8UHnqwiY3TdSi03xFQURtec211A,16236
|
||||
setuptools/_vendor/tomli/__init__.py,sha256=JhUwV66DB1g4Hvt1UQCVMdfCu-IgAV8FXmvDU9onxd4,396
|
||||
setuptools/_vendor/tomli/_parser.py,sha256=g9-ENaALS-B8dokYpCuzUFalWlog7T-SIYMjLZSWrtM,22633
|
||||
setuptools/_vendor/tomli/_re.py,sha256=dbjg5ChZT23Ka9z9DHOXfdtSpPwUfdgMXnj8NOoly-w,2943
|
||||
setuptools/_vendor/tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254
|
||||
setuptools/_vendor/tomli/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26
|
||||
setuptools/command/__init__.py,sha256=HZlSppOB8Vro73ffvP-xrORuMrh4GnVkOqJspFRG8Pg,396
|
||||
setuptools/command/_requirestxt.py,sha256=XZG2b3CDbL_yGpNl7UL7f5kXMb9OpzQqfxkGnolkySs,4222
|
||||
setuptools/command/alias.py,sha256=1holrSsdYxp1Esoa2yfRHLjiYlCRi3jYZy2yWm62YVU,2383
|
||||
setuptools/command/bdist_egg.py,sha256=6y2Rfn3lURlGyaCF3ox3nPfoKgA_ZX_m0bREQ4jKsrA,16457
|
||||
setuptools/command/bdist_rpm.py,sha256=ilQc1XVNBxv1O-8URD3iSTqszndQ2SWBpZVmrR2awO0,1289
|
||||
setuptools/command/build.py,sha256=CcnOCLzy0rxoc9W14wAcA4AFQdbRlJ5Nx8HSgioTFxw,5792
|
||||
setuptools/command/build_clib.py,sha256=0Ab0Kppziw4ng_51ODfQxF8HxU6ea89EEyBC8kWHtnc,4539
|
||||
setuptools/command/build_ext.py,sha256=9Jaand-70znOdTFPvogYPp9s5Gjp2aXuzCq07W1bTE4,17735
|
||||
setuptools/command/build_py.py,sha256=x-WK8RIooqdA4HI9Vwplb0M7-UN0U3QVYXI0wAgvA_4,15127
|
||||
setuptools/command/develop.py,sha256=o5yZVR39-iIdCQP5_Rib8Yx432azF1H1V2Kl3uCbtyk,6889
|
||||
setuptools/command/dist_info.py,sha256=f3bldKO2zvrZPu9emGLnfKJDdSqLzIdVFqgDw3_-JWI,3507
|
||||
setuptools/command/easy_install.py,sha256=mDqHVRyiNPMiYJyEm9lJhd0oamKnh4JGcMm9i7RwBEU,87110
|
||||
setuptools/command/editable_wheel.py,sha256=jyQir4KncYVKJ59JHC6FstDMUMMz-voCPe0M1GgcVI0,35349
|
||||
setuptools/command/egg_info.py,sha256=mtxpG6vx11vV4Pda_dFJ5OGszaaW09KkalTq0v6MJD4,26516
|
||||
setuptools/command/install.py,sha256=XRyF0rcuRLsfsOWPWOhD14G59xTB6IN9ddNcrxz5b7Q,5779
|
||||
setuptools/command/install_egg_info.py,sha256=zpDDCmOJspfkEekUON7wU0ABFNW-0uXUZpzpHRYUdiI,2066
|
||||
setuptools/command/install_lib.py,sha256=gUEW1ACrDcK_Mq7_RiF3YUlKA-9e-Tq9AcQs7KA-glk,3870
|
||||
setuptools/command/install_scripts.py,sha256=pq6GOQ78e8aAEkV8-r6b8iulqqlIHCamLFcy_qx2yXM,2419
|
||||
setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628
|
||||
setuptools/command/register.py,sha256=kk3DxXCb5lXTvqnhfwx2g6q7iwbUmgTyXUCaBooBOUk,468
|
||||
setuptools/command/rotate.py,sha256=P9qx7ddEo3iExuagaDgM1Z2r-x7Rd87IngXNkClu1Ig,2132
|
||||
setuptools/command/saveopts.py,sha256=mVAPMRIGE98gl6eXQ3C2Wo-qPOgl9lbH-Q_YsbLuqeg,657
|
||||
setuptools/command/sdist.py,sha256=xOp-NirTZBB2twdMB8bxVUiT3vE0qjyZBs4xwADrVEw,6811
|
||||
setuptools/command/setopt.py,sha256=w_7z7z_DDPNlY7rZIIsgdABfZD3VxCdLre3Edu8JloE,5018
|
||||
setuptools/command/test.py,sha256=DuDGCsBp2IjxCI8NHQ-71KseaN6ln1y-nIwBbLrxRu0,8101
|
||||
setuptools/command/upload.py,sha256=XT3YFVfYPAmA5qhGg0euluU98ftxRUW-PzKcODMLxUs,462
|
||||
setuptools/command/upload_docs.py,sha256=5zI6dqDdGl2ekSuMq1IcRRbh-9l-ivfb4Mq-UWM0rj4,7821
|
||||
setuptools/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/compat/py310.py,sha256=JSoeLHioO2fx1LVGZDL8IAU_7SDpRU33XBnPXLTMdyM,165
|
||||
setuptools/compat/py311.py,sha256=6qfRL57v2DWBBQdqv-w_T70KxK0iowZiCLVhESfj36Y,330
|
||||
setuptools/compat/py39.py,sha256=BJMtnkfcqyTfccqjYQxfoRtU2nTnWaEESBVkshTiXqY,493
|
||||
setuptools/config/__init__.py,sha256=aiPnL9BJn1O6MfmuNXyn8W2Lp8u9qizRVqwPiOdPIjY,1499
|
||||
setuptools/config/_apply_pyprojecttoml.py,sha256=Rg4IDYrd6Rq2Lti8r2nahqH9cYN-RMtKX_3fOAEWHOo,14791
|
||||
setuptools/config/expand.py,sha256=I7OGe4UsgZJA_efWGgdMDSnRfZ2FHHUZXXCAMdZtSfo,16456
|
||||
setuptools/config/pyprojecttoml.py,sha256=Ngimj2-KYT3YKH1xi4lc-4PSenC0RAMoqaC1MquL2mE,17701
|
||||
setuptools/config/setupcfg.py,sha256=9mffjaaI8OMxu-PLUizoYewbPskeztjc88odyKTzgkY,25630
|
||||
setuptools/config/_validate_pyproject/__init__.py,sha256=dnp6T7ePP1R5z4OuC7Fd2dkFlIrtIfizUfvpGJP6nz0,1042
|
||||
setuptools/config/_validate_pyproject/error_reporting.py,sha256=_w11R1Znez8UKLlbgXMY_ky6-eb8N94NbG1fjwNFAEg,11862
|
||||
setuptools/config/_validate_pyproject/extra_validations.py,sha256=kd0SWYrsp3IXF9KGAM8QpeaKpGyMsNgL-tjh9TPfhyY,1625
|
||||
setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py,sha256=w749JgqKi8clBFcObdcbZVqsmF4oJ_QByhZ1SGbUFNw,1612
|
||||
setuptools/config/_validate_pyproject/fastjsonschema_validations.py,sha256=WIbuTZNpogmqw2iMwn87SPsDOaiiwUj_SUMVbIXKRwY,295453
|
||||
setuptools/config/_validate_pyproject/formats.py,sha256=e9BweEdGUuK_TDIL111I3UeNBAUt8MIkQOsgsAJFuMM,12066
|
||||
setuptools/extern/__init__.py,sha256=hsOvIHnQLmaefxg3ntPKxgbpORscK1TF8jh_MPUbMeA,2727
|
||||
setuptools-70.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
|
||||
setuptools-70.0.0.dist-info/METADATA,sha256=C_sdS4alOQIAq0maPGdSFC95PDSantqk7wk9LCQStQo,5870
|
||||
setuptools-70.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
|
||||
setuptools-70.0.0.dist-info/entry_points.txt,sha256=Fe-UZkzgLTUZQOH94hbLTyP4HxM1nxlMuEZ_rS6zNnE,2676
|
||||
setuptools-70.0.0.dist-info/top_level.txt,sha256=d9yL39v_W7qmKDDSH6sT4bE0j_Ls1M3P161OGgdsm4g,41
|
||||
setuptools-70.0.0.dist-info/RECORD,,
|
@ -1,10 +1,9 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: setuptools
|
||||
Version: 70.0.0
|
||||
Version: 74.0.0
|
||||
Summary: Easily download, build, install, upgrade, and uninstall Python packages
|
||||
Home-page: https://github.com/pypa/setuptools
|
||||
Author: Python Packaging Authority
|
||||
Author-email: distutils-sig@python.org
|
||||
Author-email: Python Packaging Authority <distutils-sig@python.org>
|
||||
Project-URL: Source, https://github.com/pypa/setuptools
|
||||
Project-URL: Documentation, https://setuptools.pypa.io/
|
||||
Project-URL: Changelog, https://setuptools.pypa.io/en/stable/history.html
|
||||
Keywords: CPAN PyPI distutils eggs package management
|
||||
@ -18,50 +17,67 @@ Classifier: Topic :: System :: Archiving :: Packaging
|
||||
Classifier: Topic :: System :: Systems Administration
|
||||
Classifier: Topic :: Utilities
|
||||
Requires-Python: >=3.8
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE
|
||||
Provides-Extra: certs
|
||||
Provides-Extra: docs
|
||||
Requires-Dist: sphinx >=3.5 ; extra == 'docs'
|
||||
Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
|
||||
Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
|
||||
Requires-Dist: furo ; extra == 'docs'
|
||||
Requires-Dist: sphinx-lint ; extra == 'docs'
|
||||
Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
|
||||
Requires-Dist: pygments-github-lexers ==0.0.5 ; extra == 'docs'
|
||||
Requires-Dist: sphinx-favicon ; extra == 'docs'
|
||||
Requires-Dist: sphinx-inline-tabs ; extra == 'docs'
|
||||
Requires-Dist: sphinx-reredirects ; extra == 'docs'
|
||||
Requires-Dist: sphinxcontrib-towncrier ; extra == 'docs'
|
||||
Requires-Dist: sphinx-notfound-page <2,>=1 ; extra == 'docs'
|
||||
Requires-Dist: pyproject-hooks !=1.1 ; extra == 'docs'
|
||||
Provides-Extra: check
|
||||
Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'check'
|
||||
Requires-Dist: pytest-ruff >=0.2.1 ; (sys_platform != "cygwin") and extra == 'check'
|
||||
Requires-Dist: ruff >=0.5.2 ; (sys_platform != "cygwin") and extra == 'check'
|
||||
Provides-Extra: core
|
||||
Requires-Dist: packaging >=24 ; extra == 'core'
|
||||
Requires-Dist: more-itertools >=8.8 ; extra == 'core'
|
||||
Requires-Dist: jaraco.text >=3.7 ; extra == 'core'
|
||||
Requires-Dist: wheel >=0.43.0 ; extra == 'core'
|
||||
Requires-Dist: platformdirs >=2.6.2 ; extra == 'core'
|
||||
Requires-Dist: importlib-metadata >=6 ; (python_version < "3.10") and extra == 'core'
|
||||
Requires-Dist: tomli >=2.0.1 ; (python_version < "3.11") and extra == 'core'
|
||||
Requires-Dist: importlib-resources >=5.10.2 ; (python_version < "3.9") and extra == 'core'
|
||||
Provides-Extra: cover
|
||||
Requires-Dist: pytest-cov ; extra == 'cover'
|
||||
Provides-Extra: doc
|
||||
Requires-Dist: sphinx >=3.5 ; extra == 'doc'
|
||||
Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
|
||||
Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
|
||||
Requires-Dist: furo ; extra == 'doc'
|
||||
Requires-Dist: sphinx-lint ; extra == 'doc'
|
||||
Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
|
||||
Requires-Dist: pygments-github-lexers ==0.0.5 ; extra == 'doc'
|
||||
Requires-Dist: sphinx-favicon ; extra == 'doc'
|
||||
Requires-Dist: sphinx-inline-tabs ; extra == 'doc'
|
||||
Requires-Dist: sphinx-reredirects ; extra == 'doc'
|
||||
Requires-Dist: sphinxcontrib-towncrier ; extra == 'doc'
|
||||
Requires-Dist: sphinx-notfound-page <2,>=1 ; extra == 'doc'
|
||||
Requires-Dist: pyproject-hooks !=1.1 ; extra == 'doc'
|
||||
Requires-Dist: towncrier <24.7 ; extra == 'doc'
|
||||
Provides-Extra: enabler
|
||||
Requires-Dist: pytest-enabler >=2.2 ; extra == 'enabler'
|
||||
Provides-Extra: ssl
|
||||
Provides-Extra: testing
|
||||
Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
|
||||
Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
|
||||
Requires-Dist: pytest-mypy ; extra == 'testing'
|
||||
Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
|
||||
Requires-Dist: virtualenv >=13.0.0 ; extra == 'testing'
|
||||
Requires-Dist: wheel ; extra == 'testing'
|
||||
Requires-Dist: pip >=19.1 ; extra == 'testing'
|
||||
Requires-Dist: packaging >=23.2 ; extra == 'testing'
|
||||
Requires-Dist: jaraco.envs >=2.2 ; extra == 'testing'
|
||||
Requires-Dist: pytest-xdist >=3 ; extra == 'testing'
|
||||
Requires-Dist: jaraco.path >=3.2.0 ; extra == 'testing'
|
||||
Requires-Dist: build[virtualenv] >=1.0.3 ; extra == 'testing'
|
||||
Requires-Dist: filelock >=3.4.0 ; extra == 'testing'
|
||||
Requires-Dist: ini2toml[lite] >=0.14 ; extra == 'testing'
|
||||
Requires-Dist: tomli-w >=1.0.0 ; extra == 'testing'
|
||||
Requires-Dist: pytest-timeout ; extra == 'testing'
|
||||
Requires-Dist: pytest-home >=0.5 ; extra == 'testing'
|
||||
Requires-Dist: mypy ==1.9 ; extra == 'testing'
|
||||
Requires-Dist: tomli ; extra == 'testing'
|
||||
Requires-Dist: importlib-metadata ; extra == 'testing'
|
||||
Requires-Dist: pytest-subprocess ; extra == 'testing'
|
||||
Requires-Dist: pyproject-hooks !=1.1 ; extra == 'testing'
|
||||
Requires-Dist: pytest-cov ; (platform_python_implementation != "PyPy") and extra == 'testing'
|
||||
Requires-Dist: jaraco.develop >=7.21 ; (python_version >= "3.9" and sys_platform != "cygwin") and extra == 'testing'
|
||||
Requires-Dist: pytest-ruff >=0.2.1 ; (sys_platform != "cygwin") and extra == 'testing'
|
||||
Requires-Dist: pytest-perf ; (sys_platform != "cygwin") and extra == 'testing'
|
||||
Provides-Extra: test
|
||||
Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
|
||||
Requires-Dist: virtualenv >=13.0.0 ; extra == 'test'
|
||||
Requires-Dist: wheel >=0.44.0 ; extra == 'test'
|
||||
Requires-Dist: pip >=19.1 ; extra == 'test'
|
||||
Requires-Dist: packaging >=23.2 ; extra == 'test'
|
||||
Requires-Dist: jaraco.envs >=2.2 ; extra == 'test'
|
||||
Requires-Dist: pytest-xdist >=3 ; extra == 'test'
|
||||
Requires-Dist: jaraco.path >=3.2.0 ; extra == 'test'
|
||||
Requires-Dist: build[virtualenv] >=1.0.3 ; extra == 'test'
|
||||
Requires-Dist: filelock >=3.4.0 ; extra == 'test'
|
||||
Requires-Dist: ini2toml[lite] >=0.14 ; extra == 'test'
|
||||
Requires-Dist: tomli-w >=1.0.0 ; extra == 'test'
|
||||
Requires-Dist: pytest-timeout ; extra == 'test'
|
||||
Requires-Dist: pytest-home >=0.5 ; extra == 'test'
|
||||
Requires-Dist: pytest-subprocess ; extra == 'test'
|
||||
Requires-Dist: pyproject-hooks !=1.1 ; extra == 'test'
|
||||
Requires-Dist: jaraco.test ; extra == 'test'
|
||||
Requires-Dist: jaraco.develop >=7.21 ; (python_version >= "3.9" and sys_platform != "cygwin") and extra == 'test'
|
||||
Requires-Dist: pytest-perf ; (sys_platform != "cygwin") and extra == 'test'
|
||||
Provides-Extra: type
|
||||
Requires-Dist: pytest-mypy ; extra == 'type'
|
||||
Requires-Dist: mypy ==1.11.* ; extra == 'type'
|
||||
Requires-Dist: importlib-metadata >=7.0.2 ; (python_version < "3.10") and extra == 'type'
|
||||
Requires-Dist: jaraco.develop >=7.21 ; (sys_platform != "cygwin") and extra == 'type'
|
||||
|
||||
.. |pypi-version| image:: https://img.shields.io/pypi/v/setuptools.svg
|
||||
:target: https://pypi.org/project/setuptools
|
578
third_party/python/setuptools/setuptools-74.0.0.dist-info/RECORD
vendored
Normal file
@ -0,0 +1,578 @@
|
||||
distutils-precedence.pth,sha256=JjjOniUA5XKl4N5_rtZmHrVp0baW_LoHsN0iPaX10iQ,151
|
||||
_distutils_hack/__init__.py,sha256=oyXE9i-SoZ2qpVgIjkM5r9eOFN9Awgp9UcncOnFNwjc,6754
|
||||
_distutils_hack/override.py,sha256=Eu_s-NF6VIZ4Cqd0tbbA5wtWky2IZPNd8et6GLt1mzo,44
|
||||
pkg_resources/__init__.py,sha256=-d69BOtMx-kE22-8JSKHJD4hx-dd-8c8v8G_nT1HcuQ,126236
|
||||
pkg_resources/api_tests.txt,sha256=XEdvy4igHHrq2qNHNMHnlfO6XSQKNqOyLHbl6QcpfAI,12595
|
||||
pkg_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pkg_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pkg_resources/tests/test_find_distributions.py,sha256=U91cov5L1COAIWLNq3Xy4plU7_MnOE1WtXMu6iV2waM,1972
|
||||
pkg_resources/tests/test_integration_zope_interface.py,sha256=nzVoK557KZQN0V3DIQ1sVeaCOgt4Kpl-CODAWsO7pmc,1652
|
||||
pkg_resources/tests/test_markers.py,sha256=0orKg7UMDf7fnuNQvRMOc-EF9EAP_JTQnk4mtGgbW50,241
|
||||
pkg_resources/tests/test_pkg_resources.py,sha256=lwOYAJOui0e86isYT7cq9OJLqsB8eXHBqTlrQOaoPIg,15221
|
||||
pkg_resources/tests/test_resources.py,sha256=vjvbX9SkDRLcxIO5_g3ZuX-ZiWUlWoaXNHdrEUyxFds,31252
|
||||
pkg_resources/tests/test_working_set.py,sha256=v3lf3NSsGGNwKJFZY7y1C5_cp-dnXsoxBuSxe1Rwk78,8531
|
||||
pkg_resources/tests/data/my-test-package-source/setup.cfg,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pkg_resources/tests/data/my-test-package-source/setup.py,sha256=1VobhAZbMb7M9mfhb_NE8PwDsvukoWLs9aUAS0pYhe8,105
|
||||
pkg_resources/tests/data/my-test-package-zip/my-test-package.zip,sha256=AYRcQ39GVePPnMT8TknP1gdDHyJnXhthESmpAjnzSCI,1809
|
||||
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/PKG-INFO,sha256=JvWv9Io2PAuYwEEw2fBW4Qc5YvdbkscpKX1kmLzsoHk,187
|
||||
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/SOURCES.txt,sha256=4ClkH8eTovZrdVrJFsVuxdbMEF--lBVSuKonDAPE5Jc,208
|
||||
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/dependency_links.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
|
||||
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
|
||||
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
|
||||
pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.egg,sha256=ZTlMGxjRGiKDNkiA2c75jbQH2TWIteP00irF9gvczbo,843
|
||||
setuptools/__init__.py,sha256=I-xDUlHLkf5CSKwavhP9tptlt5zxVBAlegIrPae854I,10389
|
||||
setuptools/_core_metadata.py,sha256=7xeUsLocaSZQlgRQ81-HVRMWUWFXMyFJpVyjCDJgXS0,9795
|
||||
setuptools/_entry_points.py,sha256=Y3QUE9JKFW_YyquDnpffNWSs6f3jKEt1e-dnx--9-Kw,2310
|
||||
setuptools/_imp.py,sha256=b5sEz-EOJKApMsmb-yJPPgkGqZBeZFeLtoWhysLre-0,2441
|
||||
setuptools/_importlib.py,sha256=rrebj_YuvTeVM-A3gFJMp8lRgSAmD2fA-NPXVsxzt5w,327
|
||||
setuptools/_itertools.py,sha256=jWRfsIrpC7myooz3hDURj9GtvpswZeKXg2HakmEhNjo,657
|
||||
setuptools/_normalization.py,sha256=t-SeXc0jU2weQY9BA3qGlRThzBND1oYK5Hpzg1_533g,4536
|
||||
setuptools/_path.py,sha256=cjrnds-WCfAgBYPHh2FsnZF294pcKQnJiVa7VnqA4_I,2700
|
||||
setuptools/_reqs.py,sha256=Yp1pm07CHu1QUsoa6Cfa3m8B6LRZyonujt1CHb-uztQ,1411
|
||||
setuptools/archive_util.py,sha256=8C7X1KZXphqLkVpZdgme8bm2U9CPQ3tmnxpWMpbH0mM,7332
|
||||
setuptools/build_meta.py,sha256=fqJx2glC_hPOq3Md2MXmkyPlbtARZCPxlbigDl5fehg,19151
|
||||
setuptools/cli-32.exe,sha256=MqzBvFQxFsviz_EMuGd3LfLyVP8mNMhwrvC0bEtpb9s,11776
|
||||
setuptools/cli-64.exe,sha256=u7PeVwdinmpgoMI4zUd7KPB_AGaYL9qVP6b87DkHOko,14336
|
||||
setuptools/cli-arm64.exe,sha256=uafQjaiA36yLz1SOuksG-1m28JsX0zFIoPZhgyiSbGE,13824
|
||||
setuptools/cli.exe,sha256=MqzBvFQxFsviz_EMuGd3LfLyVP8mNMhwrvC0bEtpb9s,11776
|
||||
setuptools/depends.py,sha256=IsPdLBE6XpptUyp8WtihOD8O-PBnr2RezeiKBFT5Stc,5542
|
||||
setuptools/discovery.py,sha256=kEBc8vqi1Zq8kIGH80q0Q_xp4fFdHabQQM2EGg2eGY0,21104
|
||||
setuptools/dist.py,sha256=6bzrVzhkPdnB1AKREjMH37deKyKperYVD15yT1H55NA,36729
|
||||
setuptools/errors.py,sha256=qdHD5FonhNM6gkd863-49YacSXkR_kzh3D0lY0h2VZI,2988
|
||||
setuptools/extension.py,sha256=J39-5uS3_JN0N6ea5wPughSN0G919lj3YNRG7gx7I4I,6455
|
||||
setuptools/glob.py,sha256=iMCy0lhirF29sz2fa9B6KVGVvUniy11SFpdLl1UCb-I,4852
|
||||
setuptools/gui-32.exe,sha256=hdrh6V13hF8stZvKw9Sv50u-TJGpvMW_SnHNQxBNvnw,11776
|
||||
setuptools/gui-64.exe,sha256=NHG2FA6txkEid9u-_j_vjDRaDxpZd2CGuAo2GMOoPjs,14336
|
||||
setuptools/gui-arm64.exe,sha256=5pT0dDQFyLWSb_RX22_n8aEt7HwWqcOGR4TT9OB64Jc,13824
|
||||
setuptools/gui.exe,sha256=hdrh6V13hF8stZvKw9Sv50u-TJGpvMW_SnHNQxBNvnw,11776
|
||||
setuptools/installer.py,sha256=WF8FZiQXP2rNs_LpX_8C_A8l7sZivn5Xzk9g6lQG2g4,4970
|
||||
setuptools/launch.py,sha256=VsYBvZhikv-o_uJc60yM-2_PCQB5P5vSFnLl5C5BJ5A,812
|
||||
setuptools/logging.py,sha256=BYKd0bzbiCvUB9XEtfCYaYh9F_QNpxYjRsF2Xmw51k8,1241
|
||||
setuptools/modified.py,sha256=GGzc71yEkfwLV1-HqXRwwsnzhnb97k1D3R7H8EE_0z4,190
|
||||
setuptools/monkey.py,sha256=fx7_00njkB2cCrdtTWqZDKzdKsTz9d9GHZtMBozgwOo,3573
|
||||
setuptools/namespaces.py,sha256=T__C4WtJJsCRgJ86czU4eAvqDJOBCxWt-78k5_RnWVM,3128
|
||||
setuptools/package_index.py,sha256=9xqvwwBgse9Lq6R6IRcXqSWJSPIz_cHZz1pckNTgQnM,39051
|
||||
setuptools/sandbox.py,sha256=iPmzIPvHDl42cwFK0gnUpWkIzigXXjdTIy_8EYGgyQQ,14550
|
||||
setuptools/script (dev).tmpl,sha256=RUzQzCQUaXtwdLtYHWYbIQmOaES5Brqq1FvUA_tu-5I,218
|
||||
setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138
|
||||
setuptools/unicode_utils.py,sha256=aDvIZZPtMn3eC1cpS4ta0WugZSZ7spoKyxGqZd7AHzw,3181
|
||||
setuptools/version.py,sha256=WJCeUuyq74Aok2TeK9-OexZOu8XrlQy7-y0BEuWNovQ,161
|
||||
setuptools/warnings.py,sha256=Xz3eESrYEdP0dYlVerP8BA0es-UbkFRAE4gOnXUmxuI,3714
|
||||
setuptools/wheel.py,sha256=w-F8n9qddusDCadC8PWJlkZ5uvlXkaGytQgnkenZGiw,8628
|
||||
setuptools/windows_support.py,sha256=wW4IYLM1Bv7Z1MaauP2xmPjyy-wkmQnXdyvXscAf9fw,726
|
||||
setuptools/_distutils/__init__.py,sha256=xGYuhWwLG07J0Q49BVnEjPy6wyDcd6veJMDJX7ljlyM,359
|
||||
setuptools/_distutils/_collections.py,sha256=jbayD_dCSuJr7YJ63pKAgGbNzrHtnvz0bd-63WDKmDY,1366
|
||||
setuptools/_distutils/_functools.py,sha256=X0hb3XXNlzU0KNmF1oUO6rMpeAi12wxD5tSJZiXd3Zc,1771
|
||||
setuptools/_distutils/_itertools.py,sha256=dhBUItrpJTFwXeC-RdfkU_YP8JXRoUl6VluRV2op8kY,1453
|
||||
setuptools/_distutils/_log.py,sha256=i-lNTTcXS8TmWITJ6DODGvtW5z5tMattJQ76h8rZxQU,42
|
||||
setuptools/_distutils/_macos_compat.py,sha256=JzUGhF4E5yIITHbUaPobZEWjGHdrrcNV63z86S4RjBc,239
|
||||
setuptools/_distutils/_modified.py,sha256=D-6zIm54enmSK_9BpoG8vFSuEdaL1rUZ5HS8xxi6TQ0,2440
|
||||
setuptools/_distutils/_msvccompiler.py,sha256=841XUInCm0RfqzB8M5o7ehT1X6WX9syoi7fTwrQxZ-k,20915
|
||||
setuptools/_distutils/archive_util.py,sha256=mJ1vZJLEk4dG4inAr-X7Wgljlf2DvkPUN1TynAja1JM,8523
|
||||
setuptools/_distutils/bcppcompiler.py,sha256=KOWXKflXxzTNNwR02kf_rrsxhxez9FAnrN88MQEqTHU,14647
|
||||
setuptools/_distutils/ccompiler.py,sha256=h3xwcs4Y-OP_uxqqKXPNea9WG6iBy6XZBwEzidgQhOM,48869
|
||||
setuptools/_distutils/cmd.py,sha256=sZeDnYHuZqykNMOlX4usslxJ4jwPjAXKJvXYu5o7tns,17877
|
||||
setuptools/_distutils/config.py,sha256=5MRWmllnvszIvl4OXnzmTdOhMHQZ90ci-n7TDp82nJc,5226
|
||||
setuptools/_distutils/core.py,sha256=0H9EOr1daiLCThjEQLxD1oiRAo1IlGdJgkC7GdO0p5w,9318
|
||||
setuptools/_distutils/cygwinccompiler.py,sha256=9Y2HcNhowTPz2hXu5peDRuxhAQWljueb6bdFkiMPpOk,11654
|
||||
setuptools/_distutils/debug.py,sha256=N6MrTAqK6l9SVk6tWweR108PM8Ol7qNlfyV-nHcLhsY,139
|
||||
setuptools/_distutils/dep_util.py,sha256=xN75p6ZpHhMiHEc-rpL2XilJQynHnDNiafHteaZ4tjU,349
|
||||
setuptools/_distutils/dir_util.py,sha256=D2B5iwrxwjhPpp6cmc0gbjTuQz7CCiSYJFi9vK5k82c,8007
|
||||
setuptools/_distutils/dist.py,sha256=YRzHCzON82ZGm34SaeVJ3PVHN82aTrJ8PUpPNuOFYrs,50561
|
||||
setuptools/_distutils/errors.py,sha256=ZtBwnhDpQA2bxIazPXNDQ25uNxM4p2omsaSRNpV3rpE,3589
|
||||
setuptools/_distutils/extension.py,sha256=2SLJ8vzYZn_HPEAIUqExIdJ22ySxlYoEEejbrMoBBSc,10358
|
||||
setuptools/_distutils/fancy_getopt.py,sha256=FfBKjfzAXOwg1wJIVDBIoTgals9-XLpQdi-YbZS82Jw,17822
|
||||
setuptools/_distutils/file_util.py,sha256=jpPCdgpEN876TFSWEKPzEbj4hZrATpZKPQDOT4G-yHQ,7962
|
||||
setuptools/_distutils/filelist.py,sha256=PjeVfpvvjY00sOwofd4d79EUjmI_c7udePCL6REbYzM,13654
|
||||
setuptools/_distutils/log.py,sha256=VyBs5j7z4-K6XTEEBThUc9HyMpoPLGtQpERqbz5ylww,1200
|
||||
setuptools/_distutils/spawn.py,sha256=u5srFcVoBxOweFlWtZRjO9L__tRcOQvLH8DAel5kZSg,3625
|
||||
setuptools/_distutils/sysconfig.py,sha256=JjX-MR1UqO3qHOsrYgZW-vS3xx49j711pGt1OtaKrMY,19229
|
||||
setuptools/_distutils/text_file.py,sha256=EV8hDCetYeKq6c_uPfwGgkUw8hugeEoSJcXpy_EkQGo,12098
|
||||
setuptools/_distutils/unixccompiler.py,sha256=QJHNcQiMtWcOQ4WzupzIv1nQwBENo-bNkeVCSVmT4Jk,15437
|
||||
setuptools/_distutils/util.py,sha256=ZibjM8bCOu9bbC-RWY7_1cTBfxgCPYNnlUGeqsSU0d0,17648
|
||||
setuptools/_distutils/version.py,sha256=KlbDjAvGBybVJqRDxNLNMvZLl5XL2FP0dpVjgzfm0zY,12634
|
||||
setuptools/_distutils/versionpredicate.py,sha256=qBWQ6wTj12ODytoTmIydefIY2jb4uY1sdbgbuLn-IJM,5205
|
||||
setuptools/_distutils/zosccompiler.py,sha256=bb2dQoGnsv4LmoQBfjhDsaOpt_p5R7y_28l1cltmG94,6589
|
||||
setuptools/_distutils/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_distutils/_vendor/packaging/__init__.py,sha256=UzotcV07p8vcJzd80S-W0srhgY8NMVD_XvJcZ7JN-tA,496
|
||||
setuptools/_distutils/_vendor/packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
|
||||
setuptools/_distutils/_vendor/packaging/_manylinux.py,sha256=1ng_TqyH49hY6s3W_zVHyoJIaogbJqbIF1jJ0fAehc4,9590
|
||||
setuptools/_distutils/_vendor/packaging/_musllinux.py,sha256=kgmBGLFybpy8609-KTvzmt2zChCPWYvhp5BWP4JX7dE,2676
|
||||
setuptools/_distutils/_vendor/packaging/_parser.py,sha256=zlsFB1FpMRjkUdQb6WLq7xON52ruQadxFpYsDXWhLb4,10347
|
||||
setuptools/_distutils/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
|
||||
setuptools/_distutils/_vendor/packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
|
||||
setuptools/_distutils/_vendor/packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208
|
||||
setuptools/_distutils/_vendor/packaging/metadata.py,sha256=w7jPEg6mDf1FTZMn79aFxFuk4SKtynUJtxr2InTxlV4,33036
|
||||
setuptools/_distutils/_vendor/packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_distutils/_vendor/packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933
|
||||
setuptools/_distutils/_vendor/packaging/specifiers.py,sha256=dB2DwbmvSbEuVilEyiIQ382YfW5JfwzXTfRRPVtaENY,39784
|
||||
setuptools/_distutils/_vendor/packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950
|
||||
setuptools/_distutils/_vendor/packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268
|
||||
setuptools/_distutils/_vendor/packaging/version.py,sha256=XjRBLNK17UMDgLeP8UHnqwiY3TdSi03xFQURtec211A,16236
|
||||
setuptools/_distutils/command/__init__.py,sha256=CeFXxDglGoEJs2OaPuOGVx_eWb_yIOua3nkBfzjdwQU,416
|
||||
setuptools/_distutils/command/_framework_compat.py,sha256=0iZdSJYzGRWCCvzRDKE-R0-_yaAYvFMd1ylXb2eYXug,1609
|
||||
setuptools/_distutils/command/bdist.py,sha256=kALmrhET0pRKlSqY3EdvF-Y0zz-iJUx4_jorH9Mdadk,5346
|
||||
setuptools/_distutils/command/bdist_dumb.py,sha256=Gi_d1Nz4l6Gz_xU4LRfRVRKBri22WoNPM7uYyX2ksdk,4582
|
||||
setuptools/_distutils/command/bdist_rpm.py,sha256=DfZgvPmm3PTAd39YzLeQ8fmbyRGaXo-nmnUzYEtccg0,21686
|
||||
setuptools/_distutils/command/build.py,sha256=_jUqG3GWKIddOeMVktPUtVv05hSBt-_0zLrTByu3_qA,5729
|
||||
setuptools/_distutils/command/build_clib.py,sha256=qTpvz-Db2wEjKkX_kPwYQzFUCoP5HZrcbLu4HGKTJ0o,7684
|
||||
setuptools/_distutils/command/build_ext.py,sha256=gs7TV3MAaG4hysPk24hUa7M6jZnJzFhi_kMIbwOnYLs,31758
|
||||
setuptools/_distutils/command/build_py.py,sha256=cNe8vwMhyPJ2gN6jot9cOY5OuUPUvT6j9fIFcC-Fcik,16552
|
||||
setuptools/_distutils/command/build_scripts.py,sha256=EHCVyCiP9APE9X74SkOWj4AdS5iBCue7PqpLQP86e1Y,5534
|
||||
setuptools/_distutils/command/check.py,sha256=OM9_pYSz62maIzl3LtzrtTQ658OZQ93sOls86ITghaI,4897
|
||||
setuptools/_distutils/command/clean.py,sha256=qlKth74jWLZjKa2nxOay_2Fua6MVNTroApaQOva2dwc,2595
|
||||
setuptools/_distutils/command/config.py,sha256=FKd2vUSVOp0rpVUer4bj6D94-fyxyF8HJxitRlZFc9c,13008
|
||||
setuptools/_distutils/command/install.py,sha256=1JFKFMnIxXYHHabo8eswl72IiE8SH3M30S-PrhVk2hg,30073
|
||||
setuptools/_distutils/command/install_data.py,sha256=TqzaoQ8PIj7tTc25WKPQ81aZvP_dTYY4p9r_tqtdmg8,2816
|
||||
setuptools/_distutils/command/install_egg_info.py,sha256=Sy2fuBqvCGlQVhJcUteGbdG0KCx2BBN7jAzWEMA9rp8,2788
|
||||
setuptools/_distutils/command/install_headers.py,sha256=Wmpw2npRNUGYPR1BAbRpRaFbDFFf-KKCEVzUGyEuyvY,1184
|
||||
setuptools/_distutils/command/install_lib.py,sha256=pKLNE1rnqdoSo8bq9efQbQuQprg5TGswFpvKka1F3Fg,8330
|
||||
setuptools/_distutils/command/install_scripts.py,sha256=Qw8KrC24mcIeJEvcBNKISMpi75npSm6jzW6BOzA9G7g,1937
|
||||
setuptools/_distutils/command/register.py,sha256=oQZJSgGEadj-hu-yNS_AWxURDx4wmDPScHh3bxqCdoI,11793
|
||||
setuptools/_distutils/command/sdist.py,sha256=VuPtonywrD_DZjkiK8Br4cPp5jnRUWeV8Kc3Q4zn_ig,19175
|
||||
setuptools/_distutils/command/upload.py,sha256=_uOrBBz6C5P6abyohiUmErSez-qxuCOvk-App3u7wmI,7493
|
||||
setuptools/_distutils/compat/__init__.py,sha256=AhMdi3AzX62KYnNdOCcEuNXLuBOxhvOSQHtdji4g8z8,429
|
||||
setuptools/_distutils/compat/py38.py,sha256=QnIRFSvDKiQNGpQebTXKmC_2ZLumH08j5z8y8-J04VM,791
|
||||
setuptools/_distutils/compat/py39.py,sha256=hOsD6lwZLqZoMnacNJ3P6nUA-LJQhEpVtYTzVH0o96M,1964
|
||||
setuptools/_distutils/tests/__init__.py,sha256=bE9qRiJgLJnfPLHIU21uY7uXYCcHY9dx-V0x1vNT_-M,1476
|
||||
setuptools/_distutils/tests/support.py,sha256=tjsYsyxvpTK4NrkCseh2ujvDIGV0Mf_b5SI5fP2T0yM,4099
|
||||
setuptools/_distutils/tests/test_archive_util.py,sha256=c3ANASiJb4cDjOZXyMcNZlaY5iv-amiSS94_vPU_NL4,12911
|
||||
setuptools/_distutils/tests/test_bdist.py,sha256=xNHxUsLlHsZQRwkzLb_iSD24s-9Mk-NX2ffBWwOyPyc,1396
|
||||
setuptools/_distutils/tests/test_bdist_dumb.py,sha256=QF05MHNhPOdZyh88Xpw8KsO64s7pRFkl8KL-RoV4XK0,2247
|
||||
setuptools/_distutils/tests/test_bdist_rpm.py,sha256=YGv9442JC4K3Mh_f1xY6xx3XFZexdIkNdjNChC6_Fe4,3933
|
||||
setuptools/_distutils/tests/test_build.py,sha256=Lf66SO9Wi_exzKgsEE5WpVPgdNYHFr1ivOhKQ2gxC1o,1698
|
||||
setuptools/_distutils/tests/test_build_clib.py,sha256=Mo1ZFb4C1VXBYOGvnallwN7YCnTtr24akLDO8Zi4CsY,4331
|
||||
setuptools/_distutils/tests/test_build_ext.py,sha256=hyxOeHXp6sDb8CHxaGkR4---nP5nwbPtr9JoAJcT9YU,19961
|
||||
setuptools/_distutils/tests/test_build_py.py,sha256=NsfmRrojOHBXNMqWR_mp5g4PLTgjhD7iZFUffGZFIdw,6882
|
||||
setuptools/_distutils/tests/test_build_scripts.py,sha256=cD-FRy-oX55sXRX5Ez5xQCaeHrWajyKc4Xuwv2fe48w,2880
|
||||
setuptools/_distutils/tests/test_ccompiler.py,sha256=eVzZZE8JMIcl4OjwiuhdSNpNdKknAPOnlIe2DnFX-38,2964
|
||||
setuptools/_distutils/tests/test_check.py,sha256=hHSV07qf7YoSxGsTbbsUQ9tssZz5RRNdbrY1s2SwaFI,6226
|
||||
setuptools/_distutils/tests/test_clean.py,sha256=hPH6jfIpGFUrvWbF1txkiNVSNaAxt2wq5XjV499zO4E,1240
|
||||
setuptools/_distutils/tests/test_cmd.py,sha256=bgRB79mitoOKR1OiyZHnCogvGxt3pWkxeTqIC04lQWQ,3254
|
||||
setuptools/_distutils/tests/test_config.py,sha256=LRrpSY1KYyKgEmeLod8soGP3DtfIYJjs1Pt6lXxtVc4,2725
|
||||
setuptools/_distutils/tests/test_config_cmd.py,sha256=Zs6WX0IfxDvmuC19XzuVNnYCnTr9Y-hl73TAmDSBN4Y,2664
|
||||
setuptools/_distutils/tests/test_core.py,sha256=L7XKVAxa-MGoAZeANopnuK9fRKneYhkSQpgw8XQvcF8,3829
|
||||
setuptools/_distutils/tests/test_cygwinccompiler.py,sha256=iqxsDy0Z5ZTRgxM8ARzPXakitQod2V8aG5qet_J5tTg,2753
|
||||
setuptools/_distutils/tests/test_dir_util.py,sha256=KWN3VrlPL8viRpS9Indmx0Oxe9pgCsP4s8S636EeZE4,3854
|
||||
setuptools/_distutils/tests/test_dist.py,sha256=CFlBpbM3wJH2WjsgCGsTV4n5Z3BpfLSln6dixBxeqpM,18459
|
||||
setuptools/_distutils/tests/test_extension.py,sha256=SLJYnLhshfj4u72Q91E_5jnzVpbPljv6_xqV7yKB1Ds,3094
|
||||
setuptools/_distutils/tests/test_file_util.py,sha256=r3MNC-g3BZPKRZdHGMqSatM93D_aooTHBn-Vm4IiEDU,3502
|
||||
setuptools/_distutils/tests/test_filelist.py,sha256=Co8dDWCC5K_DUw5T6TQygBsh_PQVCoOlYQOcsl2agHc,10766
|
||||
setuptools/_distutils/tests/test_install.py,sha256=TfCB0ykhIxydIC2Q4SuTAZzSHvteMHgrBL9whoSgK9Q,8618
|
||||
setuptools/_distutils/tests/test_install_data.py,sha256=vKq3K97k0hBAnOg38nmwEdf7cEDVr9rTVyCeJolgb4A,2464
|
||||
setuptools/_distutils/tests/test_install_headers.py,sha256=PVAYpo_tYl980Qf64DPOmmSvyefIHdU06f7VsJeZykE,936
|
||||
setuptools/_distutils/tests/test_install_lib.py,sha256=qri6Rl-maNTQrNDV8DbeXNl0hjsfRIKiI4rfZLrmWBI,3612
|
||||
setuptools/_distutils/tests/test_install_scripts.py,sha256=KE3v0cDkFW-90IOID-OmZZGM2mhy-ZkEuuW7UXS2SHw,1600
|
||||
setuptools/_distutils/tests/test_log.py,sha256=isFtOufloCyEdZaQOV7cVUr46GwtdVMj43mGBB5XH7k,323
|
||||
setuptools/_distutils/tests/test_mingwccompiler.py,sha256=mBl8W8QIO2xy4eOj6aAEVom4lobwpHM-HvFUIXu6q0c,2202
|
||||
setuptools/_distutils/tests/test_modified.py,sha256=h1--bOWmtJo1bpVV6uRhdnS9br71CBiNDM1MDwSGpug,4221
|
||||
setuptools/_distutils/tests/test_msvccompiler.py,sha256=Va7s0OE3vvWaJbhG6k9xXiIwNB-y2ACc7bbQqxXKzJc,3569
|
||||
setuptools/_distutils/tests/test_register.py,sha256=N81pT28dnGCqylLzLfMlEHtUVguzfnmNPkNUB8IH58U,8839
|
||||
setuptools/_distutils/tests/test_sdist.py,sha256=VerukBsyT4SnfZFBTbiDxMoOLyNW_ELOjA7kZL3VMGQ,15392
|
||||
setuptools/_distutils/tests/test_spawn.py,sha256=bxk4RmNWFmCnyYpAlqtG8VfXfk5TdzcjV53lOxFyyh4,4613
|
||||
setuptools/_distutils/tests/test_sysconfig.py,sha256=iH4Y9E8UHrfl3P-VSt0lbgJMlHuoQsIOorxrsVRQnE8,12010
|
||||
setuptools/_distutils/tests/test_text_file.py,sha256=WQWSB5AfdBDZaMA8BFgipJPnsJb_2SKMfL90fSkRVtw,3460
|
||||
setuptools/_distutils/tests/test_unixccompiler.py,sha256=wcJQJhXtkUUE3I64TyDvM3Yo7G9a0ug_Mp7DbZLwT4Q,11840
|
||||
setuptools/_distutils/tests/test_upload.py,sha256=OsMgqJX-UosIP7eQfAyxVtGBH7el2HNYBAP7qzsp0Qc,6638
|
||||
setuptools/_distutils/tests/test_util.py,sha256=H9zlZ4z4Vh4TfjNYDBsxP7wguQLpxCfJYyOcm1yZU3c,7988
|
||||
setuptools/_distutils/tests/test_version.py,sha256=b0UMdMRBofyySAhUYnziYhImdB3CzsU7696brKDP0XM,2750
|
||||
setuptools/_distutils/tests/test_versionpredicate.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_distutils/tests/unix_compat.py,sha256=z-op6C2iVdX1aq5BIBR7cqOxijKE97alNwJqHNdLpoI,386
|
||||
setuptools/_distutils/tests/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_distutils/tests/compat/py38.py,sha256=myShQTLl-DnyAjRDJrO44HdLaEoFkKIeReHiydCiHg4,1015
|
||||
setuptools/_vendor/ruff.toml,sha256=XWlfZLXLxM4O_osKQZm4zOJlCmnKpl2W05Zbxz0Junc,16
|
||||
setuptools/_vendor/typing_extensions.py,sha256=gwekpyG9DVG3lxWKX4ni8u7nk3We5slG98mA9F3DJQw,134451
|
||||
setuptools/_vendor/autocommand/__init__.py,sha256=zko5Rnvolvb-UXjCx_2ArPTGBWwUK5QY4LIQIKYR7As,1037
|
||||
setuptools/_vendor/autocommand/autoasync.py,sha256=AMdyrxNS4pqWJfP_xuoOcImOHWD-qT7x06wmKN1Vp-U,5680
|
||||
setuptools/_vendor/autocommand/autocommand.py,sha256=hmkEmQ72HtL55gnURVjDOnsfYlGd5lLXbvT4KG496Qw,2505
|
||||
setuptools/_vendor/autocommand/automain.py,sha256=A2b8i754Mxc_DjU9WFr6vqYDWlhz0cn8miu8d8EsxV8,2076
|
||||
setuptools/_vendor/autocommand/autoparse.py,sha256=WVWmZJPcbzUKXP40raQw_0HD8qPJ2V9VG1eFFmmnFxw,11642
|
||||
setuptools/_vendor/autocommand/errors.py,sha256=7aa3roh9Herd6nIKpQHNWEslWE8oq7GiHYVUuRqORnA,886
|
||||
setuptools/_vendor/backports/__init__.py,sha256=iOEMwnlORWezdO8-2vxBIPSR37D7JGjluZ8f55vzxls,81
|
||||
setuptools/_vendor/backports/tarfile/__init__.py,sha256=Pwf2qUIfB0SolJPCKcx3vz3UEu_aids4g4sAfxy94qg,108491
|
||||
setuptools/_vendor/backports/tarfile/__main__.py,sha256=Yw2oGT1afrz2eBskzdPYL8ReB_3liApmhFkN2EbDmc4,59
|
||||
setuptools/_vendor/backports/tarfile/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/backports/tarfile/compat/py38.py,sha256=iYkyt_gvWjLzGUTJD9TuTfMMjOk-ersXZmRlvQYN2qE,568
|
||||
setuptools/_vendor/importlib_metadata/__init__.py,sha256=tZNB-23h8Bixi9uCrQqj9Yf0aeC--Josdy3IZRIQeB0,33798
|
||||
setuptools/_vendor/importlib_metadata/_adapters.py,sha256=rIhWTwBvYA1bV7i-5FfVX38qEXDTXFeS5cb5xJtP3ks,2317
|
||||
setuptools/_vendor/importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743
|
||||
setuptools/_vendor/importlib_metadata/_compat.py,sha256=73QKrN9KNoaZzhbX5yPCCZa-FaALwXe8TPlDR72JgBU,1314
|
||||
setuptools/_vendor/importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895
|
||||
setuptools/_vendor/importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068
|
||||
setuptools/_vendor/importlib_metadata/_meta.py,sha256=nxZ7C8GVlcBFAKWyVOn_dn7ot_twBcbm1NmvjIetBHI,1801
|
||||
setuptools/_vendor/importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166
|
||||
setuptools/_vendor/importlib_metadata/diagnose.py,sha256=nkSRMiowlmkhLYhKhvCg9glmt_11Cox-EmLzEbqYTa8,379
|
||||
setuptools/_vendor/importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_metadata/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_metadata/compat/py311.py,sha256=uqm-K-uohyj1042TH4a9Er_I5o7667DvulcD-gC_fSA,608
|
||||
setuptools/_vendor/importlib_metadata/compat/py39.py,sha256=cPkMv6-0ilK-0Jw_Tkn0xYbOKJZc4WJKQHow0c2T44w,1102
|
||||
setuptools/_vendor/importlib_resources/__init__.py,sha256=uyp1kzYR6SawQBsqlyaXXfIxJx4Z2mM8MjmZn8qq2Gk,505
|
||||
setuptools/_vendor/importlib_resources/_adapters.py,sha256=vprJGbUeHbajX6XCuMP6J3lMrqCi-P_MTlziJUR7jfk,4482
|
||||
setuptools/_vendor/importlib_resources/_common.py,sha256=blt4-ZtHnbUPzQQyPP7jLGgl_86btIW5ZhIsEhclhoA,5571
|
||||
setuptools/_vendor/importlib_resources/_itertools.py,sha256=eDisV6RqiNZOogLSXf6LOGHOYc79FGgPrKNLzFLmCrU,1277
|
||||
setuptools/_vendor/importlib_resources/abc.py,sha256=UKNU9ncEDkZRB3txcGb3WLxsL2iju9JbaLTI-dfLE_4,5162
|
||||
setuptools/_vendor/importlib_resources/functional.py,sha256=mLU4DwSlh8_2IXWqwKOfPVxyRqAEpB3B4XTfRxr3X3M,2651
|
||||
setuptools/_vendor/importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_resources/readers.py,sha256=WNKurBHHVu9EVtUhWkOj2fxH50HP7uanNFuupAqH2S8,5863
|
||||
setuptools/_vendor/importlib_resources/simple.py,sha256=CQ3TiIMFiJs_80o-7xJL1EpbUUVna4-NGDrSTQ3HW2Y,2584
|
||||
setuptools/_vendor/importlib_resources/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_resources/compat/py38.py,sha256=MWhut3XsAJwBYUaa5Qb2AoCrZNqcQjVThP-P1uBoE_4,230
|
||||
setuptools/_vendor/importlib_resources/compat/py39.py,sha256=Wfln4uQUShNz1XdCG-toG6_Y0WrlUmO9JzpvtcfQ-Cw,184
|
||||
setuptools/_vendor/importlib_resources/future/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_resources/future/adapters.py,sha256=1-MF2VRcCButhcC1OMfZILU9o3kwZ4nXB2lurXpaIAw,2940
|
||||
setuptools/_vendor/importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_resources/tests/_path.py,sha256=nkv3ek7D1U898v921rYbldDCtKri2oyYOi3EJqGjEGU,1289
|
||||
setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py,sha256=95N_R7aik8cvnE6sBJpsxmP0K5plOWRIJDgbalD-Hpw,3314
|
||||
setuptools/_vendor/importlib_resources/tests/test_contents.py,sha256=70HW3mL_hv05Emv-OgdmwoLhXxjtuVxiWVaUpgRaRWA,930
|
||||
setuptools/_vendor/importlib_resources/tests/test_custom.py,sha256=QrHZqIWl0e-fsQRfm0ych8stOlKJOsAIU3rK6QOcyN0,1221
|
||||
setuptools/_vendor/importlib_resources/tests/test_files.py,sha256=OcShYu33kCcyXlDyZSVPkJNE08h-N_4bQOLV2QaSqX0,3472
|
||||
setuptools/_vendor/importlib_resources/tests/test_functional.py,sha256=ByCVViAwb2PIlKvDNJEqTZ0aLZGpFl5qa7CMCX-7HKM,8591
|
||||
setuptools/_vendor/importlib_resources/tests/test_open.py,sha256=ccmzbOeEa6zTd4ymZZ8yISrecfuYV0jhon-Vddqysu4,2778
|
||||
setuptools/_vendor/importlib_resources/tests/test_path.py,sha256=x8r2gJxG3hFM9xCOFNkgmHYXxsMldMLTSW_AZYf1l-A,2009
|
||||
setuptools/_vendor/importlib_resources/tests/test_read.py,sha256=7tsILQ2NoqVGFQxhHqxBwc5hWcN8b_3idojCsszTNfQ,3112
|
||||
setuptools/_vendor/importlib_resources/tests/test_reader.py,sha256=IcIUXaiPAtuahGV4_ZT4YXFLMMsJmcM1iOxqdIH2Aa4,5001
|
||||
setuptools/_vendor/importlib_resources/tests/test_resource.py,sha256=fcF8WgZ6rDCTRFnxtAUbdiaNe4G23yGovT1nb2dc7ls,7823
|
||||
setuptools/_vendor/importlib_resources/tests/util.py,sha256=vjVzEyX0X2RkTN-wGiQiplayp9sZom4JDjJinTNewos,4745
|
||||
setuptools/_vendor/importlib_resources/tests/zip.py,sha256=2MKmF8-osXBJSnqcUTuAUek_-tSB3iKmIT9qPhcsOsM,783
|
||||
setuptools/_vendor/importlib_resources/tests/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_resources/tests/compat/py312.py,sha256=qcWjpZhQo2oEsdwIlRRQHrsMGDltkFTnETeG7fLdUS8,364
|
||||
setuptools/_vendor/importlib_resources/tests/compat/py39.py,sha256=lRTk0RWAOEb9RzAgvdRnqJUGCBLc3qoFQwzuJSa_zP4,329
|
||||
setuptools/_vendor/importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
|
||||
setuptools/_vendor/importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
|
||||
setuptools/_vendor/importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
|
||||
setuptools/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_resources/tests/data01/subdirectory/binary.file,sha256=xtRM9Bj2EOP-nh2SlP9D3vgcbNytbLsYIM_0jTqkNV0,4
|
||||
setuptools/_vendor/importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
|
||||
setuptools/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt,sha256=jnrBBztxYrtQck7cmVnc4xQVO4-agzAZDGSFkAWtlFw,10
|
||||
setuptools/_vendor/importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
|
||||
setuptools/_vendor/importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
|
||||
setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
|
||||
setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
|
||||
setuptools/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file,sha256=cbkhEL8TXIVYHIoSj2oZwPasp1KwxskeNXGJnPCbFF0,4
|
||||
setuptools/_vendor/inflect/__init__.py,sha256=Jxy1HJXZiZ85kHeLAhkmvz6EMTdFqBe-duvt34R6IOc,103796
|
||||
setuptools/_vendor/inflect/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/inflect/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/inflect/compat/py38.py,sha256=oObVfVnWX9_OpnOuEJn1mFbJxVhwyR5epbiTNXDDaso,160
|
||||
setuptools/_vendor/jaraco/context.py,sha256=REoLIxDkO5MfEYowt_WoupNCRoxBS5v7YX2PbW8lIcs,9552
|
||||
setuptools/_vendor/jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
|
||||
setuptools/_vendor/jaraco/functools/__init__.pyi,sha256=gk3dsgHzo5F_U74HzAvpNivFAPCkPJ1b2-yCd62dfnw,3878
|
||||
setuptools/_vendor/jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335
|
||||
setuptools/_vendor/jaraco/text/__init__.py,sha256=Y2YUqXR_orUoDaY4SkPRe6ZZhb5HUHB_Ah9RCNsVyho,16250
|
||||
setuptools/_vendor/jaraco/text/layouts.py,sha256=HTC8aSTLZ7uXipyOXapRMC158juecjK6RVwitfmZ9_w,643
|
||||
setuptools/_vendor/jaraco/text/show-newlines.py,sha256=WGQa65e8lyhb92LUOLqVn6KaCtoeVgVws6WtSRmLk6w,904
|
||||
setuptools/_vendor/jaraco/text/strip-prefix.py,sha256=NfVXV8JVNo6nqcuYASfMV7_y4Eo8zMQqlCOGvAnRIVw,412
|
||||
setuptools/_vendor/jaraco/text/to-dvorak.py,sha256=1SNcbSsvISpXXg-LnybIHHY-RUFOQr36zcHkY1pWFqw,119
|
||||
setuptools/_vendor/jaraco/text/to-qwerty.py,sha256=s4UMQUnPwFn_dB5uZC27BurHOQcYondBfzIpVL5pEzw,119
|
||||
setuptools/_vendor/more_itertools/__init__.py,sha256=dtAbGjTDmn_ghiU5YXfhyDy0phAlXVdt5klZA5fUa-Q,149
|
||||
setuptools/_vendor/more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
|
||||
setuptools/_vendor/more_itertools/more.py,sha256=1E5kzFncRKTDw0cYv1yRXMgDdunstLQd1QStcnL6U90,148370
|
||||
setuptools/_vendor/more_itertools/more.pyi,sha256=iXXeqt48Nxe8VGmIWpkVXuKpR2FYNuu2DU8nQLWCCu0,21484
|
||||
setuptools/_vendor/more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/more_itertools/recipes.py,sha256=WedhhfhGVgr6zii8fIbGJVmRTw0ZKRiLKnYBDGJv4nY,28591
|
||||
setuptools/_vendor/more_itertools/recipes.pyi,sha256=T_mdGpcFdfrP3JSWbwzYP9JyNV-Go-7RPfpxfftAWlA,4617
|
||||
setuptools/_vendor/packaging/__init__.py,sha256=dtw2bNmWCQ9WnMoK3bk_elL1svSlikXtLpZhCFIB9SE,496
|
||||
setuptools/_vendor/packaging/_elffile.py,sha256=_LcJW4YNKywYsl4169B2ukKRqwxjxst_8H0FRVQKlz8,3282
|
||||
setuptools/_vendor/packaging/_manylinux.py,sha256=Xo4V0PZz8sbuVCbTni0t1CR0AHeir_7ib4lTmV8scD4,9586
|
||||
setuptools/_vendor/packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694
|
||||
setuptools/_vendor/packaging/_parser.py,sha256=s_TvTvDNK0NrM2QB3VKThdWFM4Nc0P6JnkObkl3MjpM,10236
|
||||
setuptools/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
|
||||
setuptools/_vendor/packaging/_tokenizer.py,sha256=J6v5H7Jzvb-g81xp_2QACKwO7LxHQA6ikryMU7zXwN8,5273
|
||||
setuptools/_vendor/packaging/markers.py,sha256=dWKSqn5Sp-jDmOG-W3GfLHKjwhf1IsznbT71VlBoB5M,10671
|
||||
setuptools/_vendor/packaging/metadata.py,sha256=KINuSkJ12u-SyoKNTy_pHNGAfMUtxNvZ53qA1zAKcKI,32349
|
||||
setuptools/_vendor/packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947
|
||||
setuptools/_vendor/packaging/specifiers.py,sha256=rjpc3hoJuunRIT6DdH7gLTnQ5j5QKSuWjoTC5sdHtHI,39714
|
||||
setuptools/_vendor/packaging/tags.py,sha256=y8EbheOu9WS7s-MebaXMcHMF-jzsA_C1Lz5XRTiSy4w,18883
|
||||
setuptools/_vendor/packaging/utils.py,sha256=NAdYUwnlAOpkat_RthavX8a07YuVxgGL_vwrx73GSDM,5287
|
||||
setuptools/_vendor/packaging/version.py,sha256=V0H3SOj_8werrvorrb6QDLRhlcqSErNTTkCvvfszhDI,16198
|
||||
setuptools/_vendor/platformdirs/__init__.py,sha256=EMGE8qeHRR9CzDFr8kL3tA8hdZZniYjXBVZd0UGTWK0,22225
|
||||
setuptools/_vendor/platformdirs/__main__.py,sha256=HnsUQHpiBaiTxwcmwVw-nFaPdVNZtQIdi1eWDtI-MzI,1493
|
||||
setuptools/_vendor/platformdirs/android.py,sha256=xZXY9Jd46WOsxT2U6-5HsNtDZ-IQqxcEUrBLl3hYk4o,9016
|
||||
setuptools/_vendor/platformdirs/api.py,sha256=QBYdUac2eC521ek_y53uD1Dcq-lJX8IgSRVd4InC6uc,8996
|
||||
setuptools/_vendor/platformdirs/macos.py,sha256=wftsbsvq6nZ0WORXSiCrZNkRHz_WKuktl0a6mC7MFkI,5580
|
||||
setuptools/_vendor/platformdirs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/platformdirs/unix.py,sha256=Cci9Wqt35dAMsg6HT9nRGHSBW5obb0pR3AE1JJnsCXg,10643
|
||||
setuptools/_vendor/platformdirs/version.py,sha256=r7F76tZRjgQKzrpx_I0_ZMQOMU-PS7eGnHD7zEK3KB0,411
|
||||
setuptools/_vendor/platformdirs/windows.py,sha256=IFpiohUBwxPtCzlyKwNtxyW4Jk8haa6W8o59mfrDXVo,10125
|
||||
setuptools/_vendor/platformdirs-4.2.2.dist-info/licenses/LICENSE,sha256=KeD9YukphQ6G6yjD_czwzv30-pSHkBHP-z0NS-1tTbY,1089
|
||||
setuptools/_vendor/tomli/__init__.py,sha256=JhUwV66DB1g4Hvt1UQCVMdfCu-IgAV8FXmvDU9onxd4,396
|
||||
setuptools/_vendor/tomli/_parser.py,sha256=g9-ENaALS-B8dokYpCuzUFalWlog7T-SIYMjLZSWrtM,22633
|
||||
setuptools/_vendor/tomli/_re.py,sha256=dbjg5ChZT23Ka9z9DHOXfdtSpPwUfdgMXnj8NOoly-w,2943
|
||||
setuptools/_vendor/tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254
|
||||
setuptools/_vendor/tomli/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26
|
||||
setuptools/_vendor/typeguard/__init__.py,sha256=Onh4w38elPCjtlcU3JY9k3h70NjsxXIkAflmQn-Z0FY,2071
|
||||
setuptools/_vendor/typeguard/_checkers.py,sha256=JRrgKicdOEfIBoNEtegYCEIlhpad-a1u1Em7GCj0WCI,31360
|
||||
setuptools/_vendor/typeguard/_config.py,sha256=nIz8QwDa-oFO3L9O8_6srzlmd99pSby2wOM4Wb7F_B0,2846
|
||||
setuptools/_vendor/typeguard/_decorators.py,sha256=v6dsIeWvPhExGLP_wXF-RmDUyjZf_Ak28g7gBJ_v0-0,9033
|
||||
setuptools/_vendor/typeguard/_exceptions.py,sha256=ZIPeiV-FBd5Emw2EaWd2Fvlsrwi4ocwT2fVGBIAtHcQ,1121
|
||||
setuptools/_vendor/typeguard/_functions.py,sha256=ibgSAKa5ptIm1eR9ARG0BSozAFJPFNASZqhPVyQeqig,10393
|
||||
setuptools/_vendor/typeguard/_importhook.py,sha256=ugjCDvFcdWMU7UugqlJG91IpVNpEIxtRr-99s0h1k7M,6389
|
||||
setuptools/_vendor/typeguard/_memo.py,sha256=1juQV_vxnD2JYKbSrebiQuj4oKHz6n67v9pYA-CCISg,1303
|
||||
setuptools/_vendor/typeguard/_pytest_plugin.py,sha256=-fcSqkv54rIfIF8pDavY5YQPkj4OX8GMt_lL7CQSD4I,4416
|
||||
setuptools/_vendor/typeguard/_suppression.py,sha256=VQfzxcwIbu3if0f7VBkKM7hkYOA7tNFw9a7jMBsmMg4,2266
|
||||
setuptools/_vendor/typeguard/_transformer.py,sha256=9Ha7_QhdwoUni_6hvdY-hZbuEergowHrNL2vzHIakFY,44937
|
||||
setuptools/_vendor/typeguard/_union_transformer.py,sha256=v_42r7-6HuRX2SoFwnyJ-E5PlxXpVeUJPJR1-HU9qSo,1354
|
||||
setuptools/_vendor/typeguard/_utils.py,sha256=5HhO1rPn5f1M6ymkVAEv7Xmlz1cX-j0OnTMlyHqqrR8,5270
|
||||
setuptools/_vendor/typeguard/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/wheel/__init__.py,sha256=D6jhH00eMzbgrXGAeOwVfD5i-lCAMMycuG1L0useDlo,59
|
||||
setuptools/_vendor/wheel/__main__.py,sha256=NkMUnuTCGcOkgY0IBLgBCVC_BGGcWORx2K8jYGS12UE,455
|
||||
setuptools/_vendor/wheel/_setuptools_logging.py,sha256=NoCnjJ4DFEZ45Eo-2BdXLsWJCwGkait1tp_17paleVw,746
|
||||
setuptools/_vendor/wheel/bdist_wheel.py,sha256=OKJyp9E831zJrxoRfmM9AgOjByG1CB-pzF5kXQFmaKk,20938
|
||||
setuptools/_vendor/wheel/macosx_libfile.py,sha256=HnW6OPdN993psStvwl49xtx2kw7hoVbe6nvwmf8WsKI,16103
|
||||
setuptools/_vendor/wheel/metadata.py,sha256=q-xCCqSAK7HzyZxK9A6_HAWmhqS1oB4BFw1-rHQxBiQ,5884
|
||||
setuptools/_vendor/wheel/util.py,sha256=e0jpnsbbM9QhaaMSyap-_ZgUxcxwpyLDk6RHcrduPLg,621
|
||||
setuptools/_vendor/wheel/wheelfile.py,sha256=DtJDWoZMvnBh4leNMDPGOprQU9d_dp6q-MmV0U--4xc,7694
|
||||
setuptools/_vendor/wheel/cli/__init__.py,sha256=eBNhnPwWTtdKAJHy77lvz7gOQ5Eu3GavGugXxhSsn-U,4264
|
||||
setuptools/_vendor/wheel/cli/convert.py,sha256=qJcpYGKqdfw1P6BelgN1Hn_suNgM6bvyEWFlZeuSWx0,9439
|
||||
setuptools/_vendor/wheel/cli/pack.py,sha256=CAFcHdBVulvsHYJlndKVO7KMI9JqBTZz5ii0PKxxCOs,3103
|
||||
setuptools/_vendor/wheel/cli/tags.py,sha256=lHw-LaWrkS5Jy_qWcw-6pSjeNM6yAjDnqKI3E5JTTCU,4760
|
||||
setuptools/_vendor/wheel/cli/unpack.py,sha256=Y_J7ynxPSoFFTT7H0fMgbBlVErwyDGcObgme5MBuz58,1021
|
||||
setuptools/_vendor/wheel/vendored/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/wheel/vendored/vendor.txt,sha256=Z2ENjB1i5prfez8CdM1Sdr3c6Zxv2rRRolMpLmBncAE,16
|
||||
setuptools/_vendor/wheel/vendored/packaging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/wheel/vendored/packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
|
||||
setuptools/_vendor/wheel/vendored/packaging/_manylinux.py,sha256=P7sdR5_7XBY09LVYYPhHmydMJIIwPXWsh4olk74Uuj4,9588
|
||||
setuptools/_vendor/wheel/vendored/packaging/_musllinux.py,sha256=z1s8To2hQ0vpn_d-O2i5qxGwEK8WmGlLt3d_26V7NeY,2674
|
||||
setuptools/_vendor/wheel/vendored/packaging/_parser.py,sha256=4tT4emSl2qTaU7VTQE1Xa9o1jMPCsBezsYBxyNMUN-s,10347
|
||||
setuptools/_vendor/wheel/vendored/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
|
||||
setuptools/_vendor/wheel/vendored/packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
|
||||
setuptools/_vendor/wheel/vendored/packaging/markers.py,sha256=_TSPI1BhJYO7Bp9AzTmHQxIqHEVXaTjmDh9G-w8qzPA,8232
|
||||
setuptools/_vendor/wheel/vendored/packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933
|
||||
setuptools/_vendor/wheel/vendored/packaging/specifiers.py,sha256=IWSt0SrLSP72heWhAC8UL0eGvas7XIQHjqiViVfmPKE,39778
|
||||
setuptools/_vendor/wheel/vendored/packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950
|
||||
setuptools/_vendor/wheel/vendored/packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268
|
||||
setuptools/_vendor/wheel/vendored/packaging/version.py,sha256=PFJaYZDxBgyxkfYhH3SQw4qfE9ICCWrTmitvq14y3bs,16234
|
||||
setuptools/_vendor/zipp/__init__.py,sha256=QuI1g00G4fRAcGt-HqbV0oWIkmSgedCGGYsHHYzNa8A,13412
|
||||
setuptools/_vendor/zipp/glob.py,sha256=etWpnfEoRyfUvrUsi6sTiGmErvPwe6HzY6pT8jg_lUI,3082
|
||||
setuptools/_vendor/zipp/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/zipp/compat/py310.py,sha256=eZpkW0zRtunkhEh8jjX3gCGe22emoKCBJw72Zt4RkhA,219
|
||||
setuptools/command/__init__.py,sha256=JbA5PmpnZ2AHQRFVT2rz5u_PjJFs5647GOIO5OuJzDQ,397
|
||||
setuptools/command/_requirestxt.py,sha256=s2MIv5vWc7N_5mHziUIPvl6VjoPIUJsiv0cS4iXEy5M,4227
|
||||
setuptools/command/alias.py,sha256=aiGMw1AeN4rqJjr3Nr3SVDbLXjgjufriwcMM1nBmiHY,2383
|
||||
setuptools/command/bdist_egg.py,sha256=MmzT4GB9e7FUJ-KyoWyyDGRdk77qzsg8N1VQvVLx35E,16526
|
||||
setuptools/command/bdist_rpm.py,sha256=99wVFBa8EF3gHbEq10QojuYcpx3zX_NLCYZNpbG66GU,1427
|
||||
setuptools/command/bdist_wheel.py,sha256=qIKiwKyWBHjTBO5YfNB10X-y62cgZ5MbzRlQPDqnnKE,22200
|
||||
setuptools/command/build.py,sha256=C9TQr7t_q3TehWscxEPUbXH7r08pCyN0tDq9rxPFoo4,6028
|
||||
setuptools/command/build_clib.py,sha256=vxp5WmzR6RQYmhXXyAz4w00w8N7xwCqPmauNukgzrJ0,4736
|
||||
setuptools/command/build_ext.py,sha256=DuH4NcgaQwV2si9-27uBWmad-0AdxC5U7cDe8TKpnIA,18264
|
||||
setuptools/command/build_py.py,sha256=G2Hxu-Wqzh6dxWbRjRHfES_SllYUGUKryH_0ECsx2VU,15327
|
||||
setuptools/command/develop.py,sha256=SSQxIKZXP7-kUOfTT4a3WIozssiDOivHQf61o034UO4,6854
|
||||
setuptools/command/dist_info.py,sha256=FueFqI-t7-KeP1E8PyG9A0f4tDCfrw0XXf7vnaYHjY4,3508
|
||||
setuptools/command/easy_install.py,sha256=KQALpMNlJOpluNwlZsE0iZyQbo_1gF1-a9WlCk42REU,87387
|
||||
setuptools/command/editable_wheel.py,sha256=xd90bnzUVKpd--RI2dAcsSK2a9toZVTyO4-lIJL7jmw,35496
|
||||
setuptools/command/egg_info.py,sha256=_vSpINMP8w5oeMr9kH7VqSDp6X8WFXhoyMj4BUs5E40,25610
|
||||
setuptools/command/install.py,sha256=qOT-JZSBS5te3tGjvXr3Aqgoghn6HI9w1Ud4SkMrVN0,6208
|
||||
setuptools/command/install_egg_info.py,sha256=brYRRGb0_8zJ3-fQ0fnkeXCzX2raLNX05Ja7n73cJJI,2046
|
||||
setuptools/command/install_lib.py,sha256=3-_JRnvnQxTD0lSV9AtnxokwtIpvNaPBlKGp2i5S5x8,4133
|
||||
setuptools/command/install_scripts.py,sha256=mQbIx9cuMISaEj2FOi6Yu7_u_q3go_beB6gphwBCrwI,2614
|
||||
setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628
|
||||
setuptools/command/register.py,sha256=CSws23FWnqLoi6ueJcB89l31GCFKNuElIeXqLV32Xlw,605
|
||||
setuptools/command/rotate.py,sha256=9r8g3tkbvj7yOVkr6c2cmcxv5LNqyAijADbmjJ_Zy24,2145
|
||||
setuptools/command/saveopts.py,sha256=mVAPMRIGE98gl6eXQ3C2Wo-qPOgl9lbH-Q_YsbLuqeg,657
|
||||
setuptools/command/sdist.py,sha256=kGbGbvMnHm3GX8DMQAiKPyfvzWqtulWEnZN5bT22VGQ,7277
|
||||
setuptools/command/setopt.py,sha256=MjJy-0pCjnKCouYlg_ouTyB2HZn9Gz4CY0JupdXE3no,5019
|
||||
setuptools/command/test.py,sha256=k7xcq7D7bEehgxarbw-dW3AtmGZORqz8HjKR6FGJ3jk,1343
|
||||
setuptools/command/upload.py,sha256=8Z91fjY6SCZtKCAlcbNYicVY1jERiqGhW1NAJUUR6Fs,607
|
||||
setuptools/command/upload_docs.py,sha256=W-rtW1V4erPjn4SVzPSF1EzwCZLBURVvfPnEyf0DNqM,7754
|
||||
setuptools/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/compat/py310.py,sha256=8sqwWczIcrkzeAbhaim4pKVd4tXZdcqmebgdvzji0rc,141
|
||||
setuptools/compat/py311.py,sha256=e6tJAFwZEP82hmMBl10HYeSypelo_Ti2wTjKZVKLwOE,790
|
||||
setuptools/compat/py39.py,sha256=BJMtnkfcqyTfccqjYQxfoRtU2nTnWaEESBVkshTiXqY,493
|
||||
setuptools/config/NOTICE,sha256=Ld3wiBgpejuJ1D2V_2WdjahXQRCMkTbfo6TYVsBiO9g,493
|
||||
setuptools/config/__init__.py,sha256=aiPnL9BJn1O6MfmuNXyn8W2Lp8u9qizRVqwPiOdPIjY,1499
|
||||
setuptools/config/_apply_pyprojecttoml.py,sha256=XGiPPxjidEpnBFVkYssrccuovQQvrAlSn4JQzaHN5Bg,14973
|
||||
setuptools/config/distutils.schema.json,sha256=Tcp32kRnhwORGw_9p6GEi08lj2h15tQRzOYBbzGmcBU,972
|
||||
setuptools/config/expand.py,sha256=jsZUUaU_8uy-ZzlJC0khmpGjCVRlbV-sFyMIk-SGYIQ,15659
|
||||
setuptools/config/pyprojecttoml.py,sha256=O1DNHznVIFcXiLSRLk4idS1m_jXQ7wiyZ8MMUs39ITE,18018
|
||||
setuptools/config/setupcfg.py,sha256=7DMAL0-vo4fT6_dpG1X20Kvi4S-cDjfHe5k_gzAnQUk,25634
|
||||
setuptools/config/setuptools.schema.json,sha256=Nx1r_NN2xa9AoVQhix_n_-DmyLMHQAZDY36maVb9irQ,13758
|
||||
setuptools/config/_validate_pyproject/NOTICE,sha256=Ccm86pXKCG-Lxb7RdOQLyDWyl9QPtfhru7Vw_gpVgac,18737
|
||||
setuptools/config/_validate_pyproject/__init__.py,sha256=dnp6T7ePP1R5z4OuC7Fd2dkFlIrtIfizUfvpGJP6nz0,1042
|
||||
setuptools/config/_validate_pyproject/error_reporting.py,sha256=meldD7nBQdolQhvG-43r1Ue-gU1n7ORAJR86vh3Rrvk,11813
|
||||
setuptools/config/_validate_pyproject/extra_validations.py,sha256=kd0SWYrsp3IXF9KGAM8QpeaKpGyMsNgL-tjh9TPfhyY,1625
|
||||
setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py,sha256=w749JgqKi8clBFcObdcbZVqsmF4oJ_QByhZ1SGbUFNw,1612
|
||||
setuptools/config/_validate_pyproject/fastjsonschema_validations.py,sha256=3jd-Qzt5Qduose78vV9OQL_ciCxkDKIbaunTsDwvWFk,295781
|
||||
setuptools/config/_validate_pyproject/formats.py,sha256=Mjz6Rjx1BMDw2XTMAfS-sM5x5Fv5aX2Zk_q5M_7rf_c,12814
|
||||
setuptools/tests/__init__.py,sha256=AnBfls2iJbTDQzmMKeLRt-9lxhaOHUVOZEgXv89Uwvs,335
|
||||
setuptools/tests/contexts.py,sha256=TAdZKxmmodx1ExMVo01o4QpRjpIpo4X3IWKq_BnjxpU,3480
|
||||
setuptools/tests/environment.py,sha256=95_UtTaRiuvwYC9eXKEHbn02kDtZysvZq3UZJmPUj1I,3102
|
||||
setuptools/tests/fixtures.py,sha256=-V7iD6BeE2E0Rw6dVvTOCm36JG8ZTTnrXhN0GISlgrg,5197
|
||||
setuptools/tests/mod_with_constant.py,sha256=X_Kj80M55w1tmQ4f7uZY91ZTALo4hKVT6EHxgYocUMQ,22
|
||||
setuptools/tests/namespaces.py,sha256=HPcI3nR5MCFWXpaADIJ1fwKxymcQgBkuw87Ic5PUSAQ,2774
|
||||
setuptools/tests/script-with-bom.py,sha256=hRRgIizEULGiG_ZTNoMY46HhKhxpWfy5FGcD6Qbh5fc,18
|
||||
setuptools/tests/server.py,sha256=ZRJOKKQGUoa03ijPFrFiqNId428Eb9uy_ysOZtagUNs,2403
|
||||
setuptools/tests/test_archive_util.py,sha256=buuKdY8XkW26Pe3IKAoBRGHG0MDumnuNoPg2WsAQzIg,845
|
||||
setuptools/tests/test_bdist_deprecations.py,sha256=75Xq3gYn79LIIyusEltbHan0bEgAt2e_CaL7KLS8-KQ,775
|
||||
setuptools/tests/test_bdist_egg.py,sha256=cdfZsWpFmDRSbZeVu-69T0QiCc-246rbUjtkuB7iBiY,1851
|
||||
setuptools/tests/test_bdist_wheel.py,sha256=wfI9qDa43rFfIkICaR4GIOkyE_GFI-oV6HgVj1HO6RQ,19236
|
||||
setuptools/tests/test_build.py,sha256=wJgMz2hwHADcLFg-nXrwRVhus7hjmAeEGgrpIQwCGnA,798
|
||||
setuptools/tests/test_build_clib.py,sha256=bX51XRAf4uO7IuHFpjePnoK8mE74N2gsoeEqF-ofgws,3123
|
||||
setuptools/tests/test_build_ext.py,sha256=XpQPFBuhvXH71P6YaLmDx_jXzfDw6j17EKUrcq8vkZY,10036
|
||||
setuptools/tests/test_build_meta.py,sha256=4y1Cps2PVAxVi_t6A44qv8DC6QFG2fUCWZ2AJl44FA8,33649
|
||||
setuptools/tests/test_build_py.py,sha256=2MOiPfZ9CvwIOd5TNV6hCq9UrXA3HXJm5vj5GK7opsw,14185
|
||||
setuptools/tests/test_config_discovery.py,sha256=52_HEdA97dgaMvudsnIZDy7IYZPvHb5k6Yf9AA1954A,22562
|
||||
setuptools/tests/test_core_metadata.py,sha256=7xWmlSgjI_r_tUscBVt-kg2JP10Et5hMNI0f8nMtKPA,12183
|
||||
setuptools/tests/test_depends.py,sha256=yQBXoQbNQlJit6mbRVoz6Bb553f3sNrq02lZimNz5XY,424
|
||||
setuptools/tests/test_develop.py,sha256=CLzXZ8-b5-VFTuau4P4yXEdLx1UdyTFcOfrV0qyUIdE,5142
|
||||
setuptools/tests/test_dist.py,sha256=nG7y41IulZ9pjH-biwOctcQOafpRbNYz0I5sDX8d1pA,8811
|
||||
setuptools/tests/test_dist_info.py,sha256=exqqU5XSksRiXjUu4mx1SmN-_jnPoIPPWwsLqBc8fQQ,7090
|
||||
setuptools/tests/test_distutils_adoption.py,sha256=ZpdJ2wCyq2zGxsBAcjT2X72Vsj0YXIpW-7fhynb361I,4747
|
||||
setuptools/tests/test_easy_install.py,sha256=0oUehGspDiGXOyiVDKg5LPYsNmtVUhZ8FOhBoNYnigs,53241
|
||||
setuptools/tests/test_editable_install.py,sha256=W2k8YmcO9sCZleIjEtL1b_e9M16KO5L9a1GgTmNx2uY,43304
|
||||
setuptools/tests/test_egg_info.py,sha256=MvJWvIn5YZRi3vQGph4SW0AGHCf73G3L3Hdz9AnAZwk,44145
|
||||
setuptools/tests/test_extern.py,sha256=rpKU6oCcksumLwf5TeKlDluFQ0TUfbPwTLQbpxcFrCU,296
|
||||
setuptools/tests/test_find_packages.py,sha256=CTLAcTzWGWBLCcd2aAsUVkvO3ibrlqexFBdDKOWPoq8,7819
|
||||
setuptools/tests/test_find_py_modules.py,sha256=zQjuhIG5TQN2SJPix9ARo4DL_w84Ln8QsHDUjjbrtAQ,2404
|
||||
setuptools/tests/test_glob.py,sha256=pXMUqidIfGG2TM-5Z9M2T7Dz_OLPD6X-aX8QiUpFv2U,881
|
||||
setuptools/tests/test_install_scripts.py,sha256=bJFXiOYmMd-7ZgB9Kueh_vmiiBtTD3jDFOCSkzSys9Q,3441
|
||||
setuptools/tests/test_integration.py,sha256=zQ-TCRQc4gqRZogcdorgal9IM5uZEK5UiQSoubDdvKU,3882
|
||||
setuptools/tests/test_logging.py,sha256=9MeqJ_jgQTlE9M4WN7YuFC9iYhKrT9fsJIaKG1L73p4,2095
|
||||
setuptools/tests/test_manifest.py,sha256=N4UY8Y1TvjOiKerLHSF6LkHuenXGl8tw6o-ivlj3e5A,18761
|
||||
setuptools/tests/test_namespaces.py,sha256=Y6utoe5PHHqL_DlgawqB9F8XpsUDPvvw1sQMenK04e0,4515
|
||||
setuptools/tests/test_packageindex.py,sha256=zewHXX48XhgYmOwFOshw0eNT40ShsUzt5MnYB9PzRHM,8975
|
||||
setuptools/tests/test_register.py,sha256=9fgE2FyPPmHdwS8Ex3PBRo0xRr-HwgCtjXEITfbZ_Zc,517
|
||||
setuptools/tests/test_sandbox.py,sha256=sLaKBRkvQ3NpI0ZNewIjWiwCTmuuie8IPkJQyx-5kX0,4333
|
||||
setuptools/tests/test_sdist.py,sha256=TOThIW4wsR_2tv6RpJitncN_7Ovlkk_sVb86xaEPAFI,31912
|
||||
setuptools/tests/test_setopt.py,sha256=3VxxM4ATfP-P4AGnDjoWCnHr5-i9CSEQTFYU1-FTnvI,1365
|
||||
setuptools/tests/test_setuptools.py,sha256=OprrduUUgTxuTCwAsSI9TUmJJFjjtr-3b6uEuW5CNY8,8978
|
||||
setuptools/tests/test_unicode_utils.py,sha256=xWfEEl8jkQCt9othUTXJfFmdyATAFggJs2tTxjbumbw,316
|
||||
setuptools/tests/test_upload.py,sha256=wKH3zSINflmZk9XTwF4-YIPr65AcgWpHkg0bUyJYroc,507
|
||||
setuptools/tests/test_virtualenv.py,sha256=OlgHQrcrF-KtA5Noh0Iw2VVvHmkvJDZ6CWUzq4etKrE,3809
|
||||
setuptools/tests/test_warnings.py,sha256=zwR2zcnCeCeDqILZlJOPAcuyPHoDvGu1OtOVYiLMk74,3347
|
||||
setuptools/tests/test_wheel.py,sha256=pV5nPUXyJ1EIGzrWUb5cMDwrKKK2DIf8e-04pGlR7YY,19259
|
||||
setuptools/tests/test_windows_wrappers.py,sha256=2FmgEfFGX5OV0kVOpPPEZICAL76TrcNbfa2uTCzs134,7894
|
||||
setuptools/tests/text.py,sha256=a12197pMVTvB6FAWQ0ujT8fIQiLIWJlFAl1UCaDUDfg,123
|
||||
setuptools/tests/textwrap.py,sha256=FNNNq_MiaEJx88PnsbJQIRxmj1qmgcAOCXXRsODPJN4,98
|
||||
setuptools/tests/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/tests/compat/py39.py,sha256=eUy7_F-6KRTOIKl-veshUu6I0EdTSdBZMh0EV0lZ1-g,135
|
||||
setuptools/tests/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/tests/config/setupcfg_examples.txt,sha256=cAbVvCbkFZuTUL6xRRzRgqyB0rLvJTfvw3D30glo2OE,1912
|
||||
setuptools/tests/config/test_apply_pyprojecttoml.py,sha256=STdSbHcN0DK6_ByNpnP9beTpR_1YUOuVsmowNs2Uo2U,18473
|
||||
setuptools/tests/config/test_expand.py,sha256=yJmhq52MqTPEft6QOSlZR8yN6dkJVtlOcTo9D7pqGXU,8111
|
||||
setuptools/tests/config/test_pyprojecttoml.py,sha256=MxQewMcnZcZTZAhoJbGRMVvRs2eRlbHQsnAJBOqlHec,12398
|
||||
setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py,sha256=h3f9fPy-oxtC_HPgxtMGM1LgdFz01RYtTL47ubFZp5M,3072
|
||||
setuptools/tests/config/test_setupcfg.py,sha256=t12suujLW4E-xARDfExy01RmSkGF1l2lsBvXqhBPfts,33361
|
||||
setuptools/tests/config/downloads/__init__.py,sha256=0WrIXZDfFnFkRVio3a-Z6RffRMZbc3AuJstWaTrorxg,1762
|
||||
setuptools/tests/config/downloads/preload.py,sha256=sIGGZpY3cmMpMwiJYYYYHG2ifZJkvJgEotRFtiulV1I,450
|
||||
setuptools/tests/indexes/test_links_priority/external.html,sha256=eL9euOuE93JKZdqlXxBOlHbKwIuNuIdq7GBRpsaPMcU,92
|
||||
setuptools/tests/indexes/test_links_priority/simple/foobar/index.html,sha256=DD-TKr7UU4zAjHHz4VexYDNSAzR27levSh1c-k3ZdLE,174
|
||||
setuptools/tests/integration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/tests/integration/helpers.py,sha256=3PHcS9SCA-fwVJmUP2ad5NQOttJAETI5Nnoc_xroO5k,2522
|
||||
setuptools/tests/integration/test_pip_install_sdist.py,sha256=ebz2JTvinbE0XKm-I3HaSkunm6oElNaIzcrTfvf6OYQ,8288
|
||||
setuptools-74.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
|
||||
setuptools-74.0.0.dist-info/METADATA,sha256=xhABPINxNfIpUfcsm-l4qoi2tBf4D4A-JdlVDl7pzOA,6662
|
||||
setuptools-74.0.0.dist-info/WHEEL,sha256=UvcQYKBHoFqaQd6LKyqHw9fxEolWLQnlzP0h_LgJAfI,91
|
||||
setuptools-74.0.0.dist-info/entry_points.txt,sha256=vrJkNDLAQaVfqJRF2XS-UT5wsuNLH09WS536cjDi0DE,2506
|
||||
setuptools-74.0.0.dist-info/top_level.txt,sha256=d9yL39v_W7qmKDDSH6sT4bE0j_Ls1M3P161OGgdsm4g,41
|
||||
setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
|
||||
setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
|
||||
setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
|
||||
setuptools/_vendor/autocommand-2.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/autocommand-2.2.2.dist-info/LICENSE,sha256=reeNBJgtaZctREqOFKlPh6IzTdOFXMgDSOqOJAqg3y0,7634
|
||||
setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA,sha256=OADZuR3O6iBlpu1ieTgzYul6w4uOVrk0P0BO5TGGAJk,15006
|
||||
setuptools/_vendor/autocommand-2.2.2.dist-info/RECORD,sha256=giu6ZrQVJvpUcYa4AiH4XaUNZSvuVJPb_l0UCFES8MM,1308
|
||||
setuptools/_vendor/autocommand-2.2.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
|
||||
setuptools/_vendor/autocommand-2.2.2.dist-info/top_level.txt,sha256=AzfhgKKS8EdAwWUTSF8mgeVQbXOY9kokHB6kSqwwqu0,12
|
||||
setuptools/_vendor/backports.tarfile-1.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/backports.tarfile-1.2.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
|
||||
setuptools/_vendor/backports.tarfile-1.2.0.dist-info/METADATA,sha256=ghXFTq132dxaEIolxr3HK1mZqm9iyUmaRANZQSr6WlE,2020
|
||||
setuptools/_vendor/backports.tarfile-1.2.0.dist-info/RECORD,sha256=JYofHISeEXUGmlWl1s41ev3QTjTNXeJwk-Ss7HqdLOE,1360
|
||||
setuptools/_vendor/backports.tarfile-1.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/backports.tarfile-1.2.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
|
||||
setuptools/_vendor/backports.tarfile-1.2.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
|
||||
setuptools/_vendor/importlib_metadata-8.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/importlib_metadata-8.0.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
|
||||
setuptools/_vendor/importlib_metadata-8.0.0.dist-info/METADATA,sha256=anuQ7_7h4J1bSEzfcjIBakPi2cyVQ7y7jklLHsBeH1k,4648
|
||||
setuptools/_vendor/importlib_metadata-8.0.0.dist-info/RECORD,sha256=DY08buueu-hsrH1ghhVSQzwynanqUSSLYdAr4uXmQDA,2518
|
||||
setuptools/_vendor/importlib_metadata-8.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_metadata-8.0.0.dist-info/WHEEL,sha256=mguMlWGMX-VHnMpKOjjQidIo1ssRlCFu4a4mBpz1s2M,91
|
||||
setuptools/_vendor/importlib_metadata-8.0.0.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19
|
||||
setuptools/_vendor/importlib_resources-6.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/importlib_resources-6.4.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
|
||||
setuptools/_vendor/importlib_resources-6.4.0.dist-info/METADATA,sha256=g4eM2LuL0OiZcUVND0qwDJUpE29gOvtO3BSPXTbO9Fk,3944
|
||||
setuptools/_vendor/importlib_resources-6.4.0.dist-info/RECORD,sha256=ln3Vb-6hQ_HSxOmKwfk3wFXmHJqgQlFG1V9618glEPo,7620
|
||||
setuptools/_vendor/importlib_resources-6.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/importlib_resources-6.4.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
|
||||
setuptools/_vendor/importlib_resources-6.4.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
|
||||
setuptools/_vendor/inflect-7.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/inflect-7.3.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
|
||||
setuptools/_vendor/inflect-7.3.1.dist-info/METADATA,sha256=ZgMNY0WAZRs-U8wZiV2SMfjSKqBrMngXyDMs_CAwMwg,21079
|
||||
setuptools/_vendor/inflect-7.3.1.dist-info/RECORD,sha256=XXg0rBuiYSxoAQUP3lenuYsPNqz4jDwtTzdv2JEbMJE,943
|
||||
setuptools/_vendor/inflect-7.3.1.dist-info/WHEEL,sha256=y4mX-SOX4fYIkonsAGA5N0Oy-8_gI4FXw5HNI1xqvWg,91
|
||||
setuptools/_vendor/inflect-7.3.1.dist-info/top_level.txt,sha256=m52ujdp10CqT6jh1XQxZT6kEntcnv-7Tl7UiGNTzWZA,8
|
||||
setuptools/_vendor/jaraco.context-5.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/jaraco.context-5.3.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
|
||||
setuptools/_vendor/jaraco.context-5.3.0.dist-info/METADATA,sha256=xDtguJej0tN9iEXCUvxEJh2a7xceIRVBEakBLSr__tY,4020
|
||||
setuptools/_vendor/jaraco.context-5.3.0.dist-info/RECORD,sha256=VRl7iKeEQyl7stgnp1uq50CzOJYlHYcoNdS0x17C9X4,641
|
||||
setuptools/_vendor/jaraco.context-5.3.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
|
||||
setuptools/_vendor/jaraco.context-5.3.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
|
||||
setuptools/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
|
||||
setuptools/_vendor/jaraco.functools-4.0.1.dist-info/METADATA,sha256=i4aUaQDX-jjdEQK5wevhegyx8JyLfin2HyvaSk3FHso,2891
|
||||
setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD,sha256=YyqnwE98S8wBwCevW5vHb-iVj0oYEDW5V6O9MBS6JIs,843
|
||||
setuptools/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
|
||||
setuptools/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
|
||||
setuptools/_vendor/jaraco.text-3.12.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/jaraco.text-3.12.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
|
||||
setuptools/_vendor/jaraco.text-3.12.1.dist-info/METADATA,sha256=AzWdm6ViMfDOPoQMfLWn2zgBQSGJScyqeN29TcuWXVI,3658
|
||||
setuptools/_vendor/jaraco.text-3.12.1.dist-info/RECORD,sha256=gW2UV0HcokYJk4jKPu10_AZnrLqjb3C1WbJJTDl5sfY,1500
|
||||
setuptools/_vendor/jaraco.text-3.12.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/jaraco.text-3.12.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
|
||||
setuptools/_vendor/jaraco.text-3.12.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
|
||||
setuptools/_vendor/more_itertools-10.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/more_itertools-10.3.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053
|
||||
setuptools/_vendor/more_itertools-10.3.0.dist-info/METADATA,sha256=BFO90O-fLNiVQMpj7oIS5ztzgJUUQZ3TA32P5HH3N-A,36293
|
||||
setuptools/_vendor/more_itertools-10.3.0.dist-info/RECORD,sha256=d8jnPgGNwP1-ntbICwWkQEVF9kH7CFIgzkKzaLWao9M,1259
|
||||
setuptools/_vendor/more_itertools-10.3.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/more_itertools-10.3.0.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81
|
||||
setuptools/_vendor/packaging-24.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/packaging-24.1.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
|
||||
setuptools/_vendor/packaging-24.1.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
|
||||
setuptools/_vendor/packaging-24.1.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
|
||||
setuptools/_vendor/packaging-24.1.dist-info/METADATA,sha256=X3ooO3WnCfzNSBrqQjefCD1POAF1M2WSLmsHMgQlFdk,3204
|
||||
setuptools/_vendor/packaging-24.1.dist-info/RECORD,sha256=OKaJgwYpNifIHistipPl9oV9X37bc_AzTo2aU9rVO24,2565
|
||||
setuptools/_vendor/packaging-24.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/packaging-24.1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
|
||||
setuptools/_vendor/platformdirs-4.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/platformdirs-4.2.2.dist-info/METADATA,sha256=zmsie01G1MtXR0wgIv5XpVeTO7idr0WWvfmxKsKWuGk,11429
|
||||
setuptools/_vendor/platformdirs-4.2.2.dist-info/RECORD,sha256=TCEddtQu1A78Os_Mhm2JEqcYr7yit-UYSUQjZtbpn-g,1642
|
||||
setuptools/_vendor/platformdirs-4.2.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/platformdirs-4.2.2.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
|
||||
setuptools/_vendor/tomli-2.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/tomli-2.0.1.dist-info/LICENSE,sha256=uAgWsNUwuKzLTCIReDeQmEpuO2GSLCte6S8zcqsnQv4,1072
|
||||
setuptools/_vendor/tomli-2.0.1.dist-info/METADATA,sha256=zPDceKmPwJGLWtZykrHixL7WVXWmJGzZ1jyRT5lCoPI,8875
|
||||
setuptools/_vendor/tomli-2.0.1.dist-info/RECORD,sha256=DLn5pFGh42WsVLTIhmLh2gy1SnLRalJY-wq_-dPhwCI,999
|
||||
setuptools/_vendor/tomli-2.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/tomli-2.0.1.dist-info/WHEEL,sha256=jPMR_Dzkc4X4icQtmz81lnNY_kAsfog7ry7qoRvYLXw,81
|
||||
setuptools/_vendor/typeguard-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/typeguard-4.3.0.dist-info/LICENSE,sha256=YWP3mH37ONa8MgzitwsvArhivEESZRbVUu8c1DJH51g,1130
|
||||
setuptools/_vendor/typeguard-4.3.0.dist-info/METADATA,sha256=z2dcHAp0TwhYCFU5Deh8x31nazElgujUz9tbuP0pjSE,3717
|
||||
setuptools/_vendor/typeguard-4.3.0.dist-info/RECORD,sha256=SKUZWVgkeDUidUKM7s1473fXmsna55bjmi6vJUAoJVI,2402
|
||||
setuptools/_vendor/typeguard-4.3.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
|
||||
setuptools/_vendor/typeguard-4.3.0.dist-info/entry_points.txt,sha256=qp7NQ1aLtiSgMQqo6gWlfGpy0IIXzoMJmeQTLpzqFZQ,48
|
||||
setuptools/_vendor/typeguard-4.3.0.dist-info/top_level.txt,sha256=4z28AhuDodwRS_c1J_l8H51t5QuwfTseskYzlxp6grs,10
|
||||
setuptools/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/typing_extensions-4.12.2.dist-info/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936
|
||||
setuptools/_vendor/typing_extensions-4.12.2.dist-info/METADATA,sha256=BeUQIa8cnYbrjWx-N8TOznM9UGW5Gm2DicVpDtRA8W0,3018
|
||||
setuptools/_vendor/typing_extensions-4.12.2.dist-info/RECORD,sha256=dxAALYGXHmMqpqL8M9xddKr118quIgQKZdPjFQOwXuk,571
|
||||
setuptools/_vendor/typing_extensions-4.12.2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
|
||||
setuptools/_vendor/wheel-0.43.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/wheel-0.43.0.dist-info/LICENSE.txt,sha256=MMI2GGeRCPPo6h0qZYx8pBe9_IkcmO8aifpP8MmChlQ,1107
|
||||
setuptools/_vendor/wheel-0.43.0.dist-info/METADATA,sha256=WbrCKwClnT5WCKVrjPjvxDgxo2tyeS7kOJyc1GaceEE,2153
|
||||
setuptools/_vendor/wheel-0.43.0.dist-info/RECORD,sha256=eD5lR0JhGviM2fAL8BpDGiGdtTZVbmP_mBx71nMHCsk,4557
|
||||
setuptools/_vendor/wheel-0.43.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/wheel-0.43.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
|
||||
setuptools/_vendor/wheel-0.43.0.dist-info/entry_points.txt,sha256=rTY1BbkPHhkGMm4Q3F0pIzJBzW2kMxoG1oriffvGdA0,104
|
||||
setuptools/_vendor/zipp-3.19.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools/_vendor/zipp-3.19.2.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
|
||||
setuptools/_vendor/zipp-3.19.2.dist-info/METADATA,sha256=UIrk_kMIHGSwsKKChYizqMw0MMZpPRZ2ZiVpQAsN_bE,3575
|
||||
setuptools/_vendor/zipp-3.19.2.dist-info/RECORD,sha256=8xby4D_ZrefrvAsVRwaEjiu4_VaLkJNRCfDY484rm_4,1039
|
||||
setuptools/_vendor/zipp-3.19.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/zipp-3.19.2.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
|
||||
setuptools/_vendor/zipp-3.19.2.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5
|
||||
setuptools-74.0.0.dist-info/RECORD,,
5
third_party/python/setuptools/setuptools-74.0.0.dist-info/WHEEL
vendored
Normal file
@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (74.0.0)
Root-Is-Purelib: true
Tag: py3-none-any
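The four keys above use the same RFC 822 header syntax as other wheel metadata, so the file can be read with the standard library alone. A minimal sketch, not part of this commit, with the file contents inlined for illustration:

from email.parser import Parser

wheel_text = (
    "Wheel-Version: 1.0\n"
    "Generator: setuptools (74.0.0)\n"
    "Root-Is-Purelib: true\n"
    "Tag: py3-none-any\n"
)
msg = Parser().parsestr(wheel_text)
assert msg["Root-Is-Purelib"] == "true"   # pure-Python wheel, no compiled code
print(msg.get_all("Tag"))                 # a wheel may declare several Tag lines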
@ -2,6 +2,7 @@
alias = setuptools.command.alias:alias
bdist_egg = setuptools.command.bdist_egg:bdist_egg
bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
bdist_wheel = setuptools.command.bdist_wheel:bdist_wheel
build = setuptools.command.build:build
build_clib = setuptools.command.build_clib:build_clib
build_ext = setuptools.command.build_ext:build_ext
@ -19,7 +20,6 @@ rotate = setuptools.command.rotate:rotate
saveopts = setuptools.command.saveopts:saveopts
sdist = setuptools.command.sdist:sdist
setopt = setuptools.command.setopt:setopt
test = setuptools.command.test:test
upload_docs = setuptools.command.upload_docs:upload_docs

[distutils.setup_keywords]
@ -35,10 +35,6 @@ package_data = setuptools.dist:check_package_data
packages = setuptools.dist:check_packages
python_requires = setuptools.dist:check_specifier
setup_requires = setuptools.dist:check_requirements
test_loader = setuptools.dist:check_importable
test_runner = setuptools.dist:check_importable
test_suite = setuptools.dist:check_test_suite
tests_require = setuptools.dist:check_requirements
use_2to3 = setuptools.dist:invalid_unless_false
zip_safe = setuptools.dist:assert_bool
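The distutils.commands group above, which gains a bdist_wheel entry in this upgrade, is how setuptools discovers command plugins at run time. A hedged sketch of enumerating that group, assuming Python 3.10+ where importlib.metadata.entry_points accepts a group argument; this is not code from the commit:

from importlib.metadata import entry_points

# Each entry point maps a command name (e.g. "bdist_wheel") to a class such as
# setuptools.command.bdist_wheel:bdist_wheel.
for ep in entry_points(group="distutils.commands"):
    command_class = ep.load()  # imports the module and resolves the attribute
    print(ep.name, command_class)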
108
third_party/python/setuptools/setuptools/__init__.py
vendored
@ -1,22 +1,37 @@
"""Extensions to the 'distutils' for large or complex distributions"""
# mypy: disable_error_code=override
# Command.reinitialize_command has an extra **kw param that distutils doesn't have
# Can't disable on the exact line because distutils doesn't exists on Python 3.12
# and mypy isn't aware of distutils_hack, causing distutils.core.Command to be Any,
# and a [unused-ignore] to be raised on 3.12+

from __future__ import annotations

import functools
import os
import re
from typing import TYPE_CHECKING
import sys
from abc import abstractmethod
from collections.abc import Mapping
from typing import TYPE_CHECKING, TypeVar, overload

sys.path.extend(((vendor_path := os.path.join(os.path.dirname(os.path.dirname(__file__)), 'setuptools', '_vendor')) not in sys.path) * [vendor_path]) # fmt: skip
# workaround for #4476
sys.modules.pop('backports', None)

import _distutils_hack.override # noqa: F401
import distutils.core
from distutils.errors import DistutilsOptionError

from . import logging, monkey
from . import version as _version_module
from .depends import Require
from .discovery import PackageFinder, PEP420PackageFinder
from .dist import Distribution
from .extension import Extension
from .version import __version__ as __version__
from .warnings import SetuptoolsDeprecationWarning

import distutils.core
from distutils.errors import DistutilsOptionError

__all__ = [
'setup',
'Distribution',
@ -28,11 +43,10 @@ __all__ = [
'find_namespace_packages',
]

__version__ = _version_module.__version__
_CommandT = TypeVar("_CommandT", bound="_Command")

bootstrap_install_from = None


find_packages = PackageFinder.find
find_namespace_packages = PEP420PackageFinder.find

@ -46,7 +60,7 @@ def _install_setup_requires(attrs):
fetch_build_eggs interface.
"""

def __init__(self, attrs):
def __init__(self, attrs: Mapping[str, object]):
_incl = 'dependency_links', 'setup_requires'
filtered = {k: attrs[k] for k in set(_incl) & set(attrs)}
super().__init__(filtered)
@ -57,9 +71,9 @@ def _install_setup_requires(attrs):
"""Ignore ``pyproject.toml``, they are not related to setup_requires"""
try:
cfg, toml = super()._split_standard_project_metadata(filenames)
return cfg, ()
except Exception:
return filenames, ()
return cfg, ()

def finalize_options(self):
"""
@ -106,8 +120,10 @@ def setup(**attrs):
setup.__doc__ = distutils.core.setup.__doc__

if TYPE_CHECKING:
from typing_extensions import TypeAlias

# Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962
_Command = distutils.core.Command
_Command: TypeAlias = distutils.core.Command
else:
_Command = monkey.get_unpatched(distutils.core.Command)

@ -125,42 +141,25 @@ class Command(_Command):
When creating a new command from scratch, custom defined classes **SHOULD** inherit
from ``setuptools.Command`` and implement a few mandatory methods.
Between these mandatory methods, are listed:

.. method:: initialize_options(self)

Set or (reset) all options/attributes/caches used by the command
to their default values. Note that these values may be overwritten during
the build.

.. method:: finalize_options(self)

Set final values for all options/attributes used by the command.
Most of the time, each option/attribute/cache should only be set if it does not
have any value yet (e.g. ``if self.attr is None: self.attr = val``).

.. method:: run(self)

Execute the actions intended by the command.
(Side effects **SHOULD** only take place when ``run`` is executed,
for example, creating new files or writing to the terminal output).
:meth:`initialize_options`, :meth:`finalize_options` and :meth:`run`.

A useful analogy for command classes is to think of them as subroutines with local
variables called "options". The options are "declared" in ``initialize_options()``
and "defined" (given their final values, aka "finalized") in ``finalize_options()``,
variables called "options". The options are "declared" in :meth:`initialize_options`
and "defined" (given their final values, aka "finalized") in :meth:`finalize_options`,
both of which must be defined by every command class. The "body" of the subroutine,
(where it does all the work) is the ``run()`` method.
Between ``initialize_options()`` and ``finalize_options()``, ``setuptools`` may set
(where it does all the work) is the :meth:`run` method.
Between :meth:`initialize_options` and :meth:`finalize_options`, ``setuptools`` may set
the values for options/attributes based on user's input (or circumstance),
which means that the implementation should be careful to not overwrite values in
``finalize_options`` unless necessary.
:meth:`finalize_options` unless necessary.

Please note that other commands (or other parts of setuptools) may also overwrite
the values of the command's options/attributes multiple times during the build
process.
Therefore it is important to consistently implement ``initialize_options()`` and
``finalize_options()``. For example, all derived attributes (or attributes that
Therefore it is important to consistently implement :meth:`initialize_options` and
:meth:`finalize_options`. For example, all derived attributes (or attributes that
depend on the value of other attributes) **SHOULD** be recomputed in
``finalize_options``.
:meth:`finalize_options`.

When overwriting existing commands, custom defined classes **MUST** abide by the
same APIs implemented by the original class. They also **SHOULD** inherit from the
@ -216,11 +215,48 @@ class Command(_Command):
|
||||
"'%s' must be a list of strings (got %r)" % (option, val)
|
||||
)
|
||||
|
||||
def reinitialize_command(self, command, reinit_subcommands=0, **kw):
|
||||
@overload
|
||||
def reinitialize_command(
|
||||
self, command: str, reinit_subcommands: bool = False, **kw
|
||||
) -> _Command: ...
|
||||
@overload
|
||||
def reinitialize_command(
|
||||
self, command: _CommandT, reinit_subcommands: bool = False, **kw
|
||||
) -> _CommandT: ...
|
||||
def reinitialize_command(
|
||||
self, command: str | _Command, reinit_subcommands: bool = False, **kw
|
||||
) -> _Command:
|
||||
cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
|
||||
vars(cmd).update(kw)
|
||||
return cmd
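
As a side note, a minimal sketch of how these typed overloads tend to be exercised from another command — the command name and the extra keyword are illustrative, not part of this change:

    # inside some other setuptools.Command subclass (hypothetical usage)
    cmd = self.reinitialize_command('build_ext', reinit_subcommands=True, inplace=True)
    self.run_command('build_ext')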
|
||||
|
||||
@abstractmethod
|
||||
def initialize_options(self) -> None:
|
||||
"""
|
||||
Set or (reset) all options/attributes/caches used by the command
|
||||
to their default values. Note that these values may be overwritten during
|
||||
the build.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
@abstractmethod
|
||||
def finalize_options(self) -> None:
|
||||
"""
|
||||
Set final values for all options/attributes used by the command.
|
||||
Most of the time, each option/attribute/cache should only be set if it does not
|
||||
have any value yet (e.g. ``if self.attr is None: self.attr = val``).
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
@abstractmethod
|
||||
def run(self) -> None:
|
||||
"""
|
||||
Execute the actions intended by the command.
|
||||
(Side effects **SHOULD** only take place when :meth:`run` is executed,
|
||||
for example, creating new files or writing to the terminal output).
|
||||
"""
|
||||
raise NotImplementedError
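
Taken together, the three abstract methods give the lifecycle described in the class docstring above. A minimal sketch of a conforming command — the command name, option, and behaviour are purely illustrative:

    import shutil
    from setuptools import Command

    class clean_docs(Command):  # hypothetical example command
        description = "remove the generated docs tree (illustrative only)"
        user_options = [('docs-dir=', None, "directory to remove")]

        def initialize_options(self):
            # "declare" every option with a default value
            self.docs_dir = None

        def finalize_options(self):
            # only fill in values that are still unset, so user input survives
            if self.docs_dir is None:
                self.docs_dir = 'build/docs'

        def run(self):
            # side effects belong here and nowhere else
            shutil.rmtree(self.docs_dir, ignore_errors=True)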
|
||||
|
||||
|
||||
def _find_all_simple(path):
|
||||
"""
|
||||
|
@ -4,23 +4,25 @@ Handling of Core Metadata for Python packages (including reading and writing).
|
||||
See: https://packaging.python.org/en/latest/specifications/core-metadata/
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import stat
|
||||
import textwrap
|
||||
from email import message_from_file
|
||||
from email.message import Message
|
||||
from tempfile import NamedTemporaryFile
|
||||
from typing import Optional, List
|
||||
|
||||
from distutils.util import rfc822_escape
|
||||
from packaging.markers import Marker
|
||||
from packaging.requirements import Requirement
|
||||
from packaging.utils import canonicalize_name, canonicalize_version
|
||||
from packaging.version import Version
|
||||
|
||||
from . import _normalization, _reqs
|
||||
from .extern.packaging.markers import Marker
|
||||
from .extern.packaging.requirements import Requirement
|
||||
from .extern.packaging.utils import canonicalize_name
|
||||
from .extern.packaging.version import Version
|
||||
from .warnings import SetuptoolsDeprecationWarning
|
||||
|
||||
from distutils.util import rfc822_escape
|
||||
|
||||
|
||||
def get_metadata_version(self):
|
||||
mv = getattr(self, 'metadata_version', None)
|
||||
@ -38,7 +40,7 @@ def rfc822_unescape(content: str) -> str:
|
||||
return '\n'.join((lines[0].lstrip(), textwrap.dedent('\n'.join(lines[1:]))))
|
||||
|
||||
|
||||
def _read_field_from_msg(msg: Message, field: str) -> Optional[str]:
|
||||
def _read_field_from_msg(msg: Message, field: str) -> str | None:
|
||||
"""Read Message header field."""
|
||||
value = msg[field]
|
||||
if value == 'UNKNOWN':
|
||||
@ -46,7 +48,7 @@ def _read_field_from_msg(msg: Message, field: str) -> Optional[str]:
|
||||
return value
|
||||
|
||||
|
||||
def _read_field_unescaped_from_msg(msg: Message, field: str) -> Optional[str]:
|
||||
def _read_field_unescaped_from_msg(msg: Message, field: str) -> str | None:
|
||||
"""Read Message header field and apply rfc822_unescape."""
|
||||
value = _read_field_from_msg(msg, field)
|
||||
if value is None:
|
||||
@ -54,7 +56,7 @@ def _read_field_unescaped_from_msg(msg: Message, field: str) -> Optional[str]:
|
||||
return rfc822_unescape(value)
|
||||
|
||||
|
||||
def _read_list_from_msg(msg: Message, field: str) -> Optional[List[str]]:
|
||||
def _read_list_from_msg(msg: Message, field: str) -> list[str] | None:
|
||||
"""Read Message header field and return all results as list."""
|
||||
values = msg.get_all(field, None)
|
||||
if values == []:
|
||||
@ -62,7 +64,7 @@ def _read_list_from_msg(msg: Message, field: str) -> Optional[List[str]]:
|
||||
return values
|
||||
|
||||
|
||||
def _read_payload_from_msg(msg: Message) -> Optional[str]:
|
||||
def _read_payload_from_msg(msg: Message) -> str | None:
|
||||
value = str(msg.get_payload()).strip()
|
||||
if value == 'UNKNOWN' or not value:
|
||||
return None
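
All of these helpers share one convention: the legacy 'UNKNOWN' placeholder (or an empty payload) is normalised to None, hence the `str | None` / `list[str] | None` annotations. A rough usage sketch, assuming a PKG-INFO file on disk (the path and field names are illustrative):

    from email import message_from_file

    with open('PKG-INFO', encoding='utf-8') as f:  # hypothetical path
        msg = message_from_file(f)

    name = _read_field_from_msg(msg, 'Name')                  # str | None
    summary = _read_field_unescaped_from_msg(msg, 'Summary')  # unescaped str | None
    classifiers = _read_list_from_msg(msg, 'Classifier')      # list[str] | None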
|
||||
@ -262,7 +264,23 @@ def _write_provides_extra(file, processed_extras, safe, unsafe):
|
||||
|
||||
# from pypa/distutils#244; needed only until that logic is always available
|
||||
def get_fullname(self):
|
||||
return _distribution_fullname(self.get_name(), self.get_version())
|
||||
|
||||
|
||||
def _distribution_fullname(name: str, version: str) -> str:
|
||||
"""
|
||||
>>> _distribution_fullname('setup.tools', '1.0-2')
|
||||
'setup_tools-1.0.post2'
|
||||
>>> _distribution_fullname('setup-tools', '1.2post2')
|
||||
'setup_tools-1.2.post2'
|
||||
>>> _distribution_fullname('setup-tools', '1.0-r2')
|
||||
'setup_tools-1.0.post2'
|
||||
>>> _distribution_fullname('setup.tools', '1.0.post')
|
||||
'setup_tools-1.0.post0'
|
||||
>>> _distribution_fullname('setup.tools', '1.0+ubuntu-1')
|
||||
'setup_tools-1.0+ubuntu.1'
|
||||
"""
|
||||
return "{}-{}".format(
|
||||
canonicalize_name(self.get_name()).replace('-', '_'),
|
||||
self.get_version(),
|
||||
canonicalize_name(name).replace('-', '_'),
|
||||
canonicalize_version(version, strip_trailing_zero=False),
|
||||
)
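
The `strip_trailing_zero=False` argument is what keeps `1.0` from collapsing to `1` in the doctests above; as I understand the `packaging` API, the default behaviour would change the resulting file name:

    from packaging.utils import canonicalize_version

    canonicalize_version('1.0-2')                             # '1.post2' (trailing zero stripped by default)
    canonicalize_version('1.0-2', strip_trailing_zero=False)  # '1.0.post2'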
|
||||
|
@ -1,11 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections
|
||||
import functools
|
||||
import itertools
|
||||
import operator
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
|
||||
|
||||
# from jaraco.collections 3.5.1
|
||||
@ -60,144 +56,3 @@ class DictStack(list, collections.abc.Mapping):
|
||||
|
||||
def __len__(self):
|
||||
return len(list(iter(self)))
|
||||
|
||||
|
||||
# from jaraco.collections 5.0.1
|
||||
class RangeMap(dict):
|
||||
"""
|
||||
A dictionary-like object that uses the keys as bounds for a range.
|
||||
Inclusion of the value for that range is determined by the
|
||||
key_match_comparator, which defaults to less-than-or-equal.
|
||||
A value is returned for a key if it is the first key that matches in
|
||||
the sorted list of keys.
|
||||
|
||||
One may supply keyword parameters to be passed to the sort function used
|
||||
to sort keys (i.e. key, reverse) as sort_params.
|
||||
|
||||
Create a map that maps 1-3 -> 'a', 4-6 -> 'b'
|
||||
|
||||
>>> r = RangeMap({3: 'a', 6: 'b'}) # boy, that was easy
|
||||
>>> r[1], r[2], r[3], r[4], r[5], r[6]
|
||||
('a', 'a', 'a', 'b', 'b', 'b')
|
||||
|
||||
Even float values should work so long as the comparison operator
|
||||
supports it.
|
||||
|
||||
>>> r[4.5]
|
||||
'b'
|
||||
|
||||
Notice that the way rangemap is defined, it must be open-ended
|
||||
on one side.
|
||||
|
||||
>>> r[0]
|
||||
'a'
|
||||
>>> r[-1]
|
||||
'a'
|
||||
|
||||
One can close the open-end of the RangeMap by using undefined_value
|
||||
|
||||
>>> r = RangeMap({0: RangeMap.undefined_value, 3: 'a', 6: 'b'})
|
||||
>>> r[0]
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
KeyError: 0
|
||||
|
||||
One can get the first or last elements in the range by using RangeMap.Item
|
||||
|
||||
>>> last_item = RangeMap.Item(-1)
|
||||
>>> r[last_item]
|
||||
'b'
|
||||
|
||||
.last_item is a shortcut for Item(-1)
|
||||
|
||||
>>> r[RangeMap.last_item]
|
||||
'b'
|
||||
|
||||
Sometimes it's useful to find the bounds for a RangeMap
|
||||
|
||||
>>> r.bounds()
|
||||
(0, 6)
|
||||
|
||||
RangeMap supports .get(key, default)
|
||||
|
||||
>>> r.get(0, 'not found')
|
||||
'not found'
|
||||
|
||||
>>> r.get(7, 'not found')
|
||||
'not found'
|
||||
|
||||
One often wishes to define the ranges by their left-most values,
|
||||
which requires use of sort params and a key_match_comparator.
|
||||
|
||||
>>> r = RangeMap({1: 'a', 4: 'b'},
|
||||
... sort_params=dict(reverse=True),
|
||||
... key_match_comparator=operator.ge)
|
||||
>>> r[1], r[2], r[3], r[4], r[5], r[6]
|
||||
('a', 'a', 'a', 'b', 'b', 'b')
|
||||
|
||||
That wasn't nearly as easy as before, so an alternate constructor
|
||||
is provided:
|
||||
|
||||
>>> r = RangeMap.left({1: 'a', 4: 'b', 7: RangeMap.undefined_value})
|
||||
>>> r[1], r[2], r[3], r[4], r[5], r[6]
|
||||
('a', 'a', 'a', 'b', 'b', 'b')
|
||||
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
source,
|
||||
sort_params: Mapping[str, Any] = {},
|
||||
key_match_comparator=operator.le,
|
||||
):
|
||||
dict.__init__(self, source)
|
||||
self.sort_params = sort_params
|
||||
self.match = key_match_comparator
|
||||
|
||||
@classmethod
|
||||
def left(cls, source):
|
||||
return cls(
|
||||
source, sort_params=dict(reverse=True), key_match_comparator=operator.ge
|
||||
)
|
||||
|
||||
def __getitem__(self, item):
|
||||
sorted_keys = sorted(self.keys(), **self.sort_params)
|
||||
if isinstance(item, RangeMap.Item):
|
||||
result = self.__getitem__(sorted_keys[item])
|
||||
else:
|
||||
key = self._find_first_match_(sorted_keys, item)
|
||||
result = dict.__getitem__(self, key)
|
||||
if result is RangeMap.undefined_value:
|
||||
raise KeyError(key)
|
||||
return result
|
||||
|
||||
def get(self, key, default=None):
|
||||
"""
|
||||
Return the value for key if key is in the dictionary, else default.
|
||||
If default is not given, it defaults to None, so that this method
|
||||
never raises a KeyError.
|
||||
"""
|
||||
try:
|
||||
return self[key]
|
||||
except KeyError:
|
||||
return default
|
||||
|
||||
def _find_first_match_(self, keys, item):
|
||||
is_match = functools.partial(self.match, item)
|
||||
matches = list(filter(is_match, keys))
|
||||
if matches:
|
||||
return matches[0]
|
||||
raise KeyError(item)
|
||||
|
||||
def bounds(self):
|
||||
sorted_keys = sorted(self.keys(), **self.sort_params)
|
||||
return (sorted_keys[RangeMap.first_item], sorted_keys[RangeMap.last_item])
|
||||
|
||||
# some special values for the RangeMap
|
||||
undefined_value = type('RangeValueUndefined', (), {})()
|
||||
|
||||
class Item(int):
|
||||
"RangeMap Item"
|
||||
|
||||
first_item = Item(0)
|
||||
last_item = Item(-1)
|
||||
|
@ -4,8 +4,8 @@ import functools
|
||||
import os.path
|
||||
|
||||
from ._functools import splat
|
||||
from .compat.py39 import zip_strict
|
||||
from .errors import DistutilsFileError
|
||||
from .py39compat import zip_strict
|
||||
|
||||
|
||||
def _newer(source, target):
|
||||
@ -24,7 +24,7 @@ def newer(source, target):
|
||||
Raises DistutilsFileError if 'source' does not exist.
|
||||
"""
|
||||
if not os.path.exists(source):
|
||||
raise DistutilsFileError("file '%s' does not exist" % os.path.abspath(source))
|
||||
raise DistutilsFileError(f"file '{os.path.abspath(source)}' does not exist")
|
||||
|
||||
return _newer(source, target)
|
||||
|
||||
@ -63,7 +63,7 @@ def newer_group(sources, target, missing='error'):
|
||||
return missing == 'newer' and not os.path.exists(source)
|
||||
|
||||
ignored = os.path.exists if missing == 'ignore' else None
|
||||
return any(
|
||||
return not os.path.exists(target) or any(
|
||||
missing_as_newer(source) or _newer(source, target)
|
||||
for source in filter(ignored, sources)
|
||||
)
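
With the added `not os.path.exists(target)` guard, a missing target is now always treated as out of date. A small sketch of the intended call pattern (file names are illustrative):

    # rebuild foo.o if it is missing, or if any existing source is newer;
    # with missing='newer', absent sources also force a rebuild
    if newer_group(['foo.c', 'foo.h'], 'foo.o', missing='newer'):
        ...  # recompile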
|
||||
|
@ -33,7 +33,7 @@ from .errors import (
|
||||
LibError,
|
||||
LinkError,
|
||||
)
|
||||
from .util import get_platform
|
||||
from .util import get_host_platform, get_platform
|
||||
|
||||
|
||||
def _find_vc2015():
|
||||
@ -79,32 +79,40 @@ def _find_vc2017():
|
||||
if not root:
|
||||
return None, None
|
||||
|
||||
try:
|
||||
path = subprocess.check_output(
|
||||
[
|
||||
os.path.join(
|
||||
root, "Microsoft Visual Studio", "Installer", "vswhere.exe"
|
||||
),
|
||||
"-latest",
|
||||
"-prerelease",
|
||||
"-requires",
|
||||
"Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
|
||||
"-property",
|
||||
"installationPath",
|
||||
"-products",
|
||||
"*",
|
||||
],
|
||||
encoding="mbcs",
|
||||
errors="strict",
|
||||
).strip()
|
||||
except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
|
||||
return None, None
|
||||
variant = 'arm64' if get_platform() == 'win-arm64' else 'x86.x64'
|
||||
suitable_components = (
|
||||
f"Microsoft.VisualStudio.Component.VC.Tools.{variant}",
|
||||
"Microsoft.VisualStudio.Workload.WDExpress",
|
||||
)
|
||||
|
||||
path = os.path.join(path, "VC", "Auxiliary", "Build")
|
||||
if os.path.isdir(path):
|
||||
return 15, path
|
||||
for component in suitable_components:
|
||||
# Workaround for `-requiresAny` (only available on VS 2017 > 15.6)
|
||||
with contextlib.suppress(
|
||||
subprocess.CalledProcessError, OSError, UnicodeDecodeError
|
||||
):
|
||||
path = (
|
||||
subprocess.check_output([
|
||||
os.path.join(
|
||||
root, "Microsoft Visual Studio", "Installer", "vswhere.exe"
|
||||
),
|
||||
"-latest",
|
||||
"-prerelease",
|
||||
"-requires",
|
||||
component,
|
||||
"-property",
|
||||
"installationPath",
|
||||
"-products",
|
||||
"*",
|
||||
])
|
||||
.decode(encoding="mbcs", errors="strict")
|
||||
.strip()
|
||||
)
|
||||
|
||||
return None, None
|
||||
path = os.path.join(path, "VC", "Auxiliary", "Build")
|
||||
if os.path.isdir(path):
|
||||
return 15, path
|
||||
|
||||
return None, None # no suitable component found
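
The return shape is unchanged: a version/path pair on success, `(None, None)` otherwise. A hypothetical sketch of the two outcomes (the install path is illustrative):

    version, vc_dir = _find_vc2017()
    # e.g. (15, r'C:\Program Files\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build')
    # or   (None, None) when vswhere finds no suitable VC Tools / WDExpress component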
|
||||
|
||||
|
||||
PLAT_SPEC_TO_RUNTIME = {
|
||||
@ -140,7 +148,11 @@ def _get_vc_env(plat_spec):
|
||||
|
||||
vcvarsall, _ = _find_vcvarsall(plat_spec)
|
||||
if not vcvarsall:
|
||||
raise DistutilsPlatformError("Unable to find vcvarsall.bat")
|
||||
raise DistutilsPlatformError(
|
||||
'Microsoft Visual C++ 14.0 or greater is required. '
|
||||
'Get it with "Microsoft C++ Build Tools": '
|
||||
'https://visualstudio.microsoft.com/visual-cpp-build-tools/'
|
||||
)
|
||||
|
||||
try:
|
||||
out = subprocess.check_output(
|
||||
@ -178,17 +190,43 @@ def _find_exe(exe, paths=None):
|
||||
return exe
|
||||
|
||||
|
||||
# A map keyed by get_platform() return values to values accepted by
|
||||
# 'vcvarsall.bat'. Always cross-compile from x86 to work with the
|
||||
# lighter-weight MSVC installs that do not include native 64-bit tools.
|
||||
PLAT_TO_VCVARS = {
|
||||
_vcvars_names = {
|
||||
'win32': 'x86',
|
||||
'win-amd64': 'x86_amd64',
|
||||
'win-arm32': 'x86_arm',
|
||||
'win-arm64': 'x86_arm64',
|
||||
'win-amd64': 'amd64',
|
||||
'win-arm32': 'arm',
|
||||
'win-arm64': 'arm64',
|
||||
}
|
||||
|
||||
|
||||
def _get_vcvars_spec(host_platform, platform):
|
||||
"""
|
||||
Given a host platform and platform, determine the spec for vcvarsall.
|
||||
|
||||
Uses the native MSVC host if the host platform would need expensive
|
||||
emulation for x86.
|
||||
|
||||
>>> _get_vcvars_spec('win-arm64', 'win32')
|
||||
'arm64_x86'
|
||||
>>> _get_vcvars_spec('win-arm64', 'win-amd64')
|
||||
'arm64_amd64'
|
||||
|
||||
Otherwise, always cross-compile from x86 to work with the
|
||||
lighter-weight MSVC installs that do not include native 64-bit tools.
|
||||
|
||||
>>> _get_vcvars_spec('win32', 'win32')
|
||||
'x86'
|
||||
>>> _get_vcvars_spec('win-arm32', 'win-arm32')
|
||||
'x86_arm'
|
||||
>>> _get_vcvars_spec('win-amd64', 'win-arm64')
|
||||
'x86_arm64'
|
||||
"""
|
||||
if host_platform != 'win-arm64':
|
||||
host_platform = 'win32'
|
||||
vc_hp = _vcvars_names[host_platform]
|
||||
vc_plat = _vcvars_names[platform]
|
||||
return vc_hp if vc_hp == vc_plat else f'{vc_hp}_{vc_plat}'
|
||||
|
||||
|
||||
class MSVCCompiler(CCompiler):
|
||||
"""Concrete class that implements an interface to Microsoft Visual C++,
|
||||
as defined by the CCompiler abstract class."""
|
||||
@ -218,7 +256,7 @@ class MSVCCompiler(CCompiler):
|
||||
static_lib_format = shared_lib_format = '%s%s'
|
||||
exe_extension = '.exe'
|
||||
|
||||
def __init__(self, verbose=0, dry_run=0, force=0):
|
||||
def __init__(self, verbose=False, dry_run=False, force=False):
|
||||
super().__init__(verbose, dry_run, force)
|
||||
# target platform (.plat_name is consistent with 'bdist')
|
||||
self.plat_name = None
|
||||
@ -242,13 +280,12 @@ class MSVCCompiler(CCompiler):
|
||||
if plat_name is None:
|
||||
plat_name = get_platform()
|
||||
# sanity check for platforms to prevent obscure errors later.
|
||||
if plat_name not in PLAT_TO_VCVARS:
|
||||
if plat_name not in _vcvars_names:
|
||||
raise DistutilsPlatformError(
|
||||
f"--plat-name must be one of {tuple(PLAT_TO_VCVARS)}"
|
||||
f"--plat-name must be one of {tuple(_vcvars_names)}"
|
||||
)
|
||||
|
||||
# Get the vcvarsall.bat spec for the requested platform.
|
||||
plat_spec = PLAT_TO_VCVARS[plat_name]
|
||||
plat_spec = _get_vcvars_spec(get_host_platform(), get_platform())
|
||||
|
||||
vc_env = _get_vc_env(plat_spec)
|
||||
if not vc_env:
|
||||
@ -334,7 +371,7 @@ class MSVCCompiler(CCompiler):
|
||||
output_dir=None,
|
||||
macros=None,
|
||||
include_dirs=None,
|
||||
debug=0,
|
||||
debug=False,
|
||||
extra_preargs=None,
|
||||
extra_postargs=None,
|
||||
depends=None,
|
||||
@ -423,7 +460,7 @@ class MSVCCompiler(CCompiler):
|
||||
return objects
|
||||
|
||||
def create_static_lib(
|
||||
self, objects, output_libname, output_dir=None, debug=0, target_lang=None
|
||||
self, objects, output_libname, output_dir=None, debug=False, target_lang=None
|
||||
):
|
||||
if not self.initialized:
|
||||
self.initialize()
|
||||
@ -452,7 +489,7 @@ class MSVCCompiler(CCompiler):
|
||||
library_dirs=None,
|
||||
runtime_library_dirs=None,
|
||||
export_symbols=None,
|
||||
debug=0,
|
||||
debug=False,
|
||||
extra_preargs=None,
|
||||
extra_postargs=None,
|
||||
build_temp=None,
|
||||
@ -551,7 +588,7 @@ class MSVCCompiler(CCompiler):
|
||||
def library_option(self, lib):
|
||||
return self.library_filename(lib)
|
||||
|
||||
def find_library_file(self, dirs, lib, debug=0):
|
||||
def find_library_file(self, dirs, lib, debug=False):
|
||||
# Prefer a debugging library if found (and requested), but deal
|
||||
# with it if we don't have one.
|
||||
if debug:
|
||||
|
3
third_party/python/setuptools/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
This software is made available under the terms of *either* of the licenses
|
||||
found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made
|
||||
under the terms of *both* these licenses.
|
177
third_party/python/setuptools/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE
vendored
Normal file
@ -0,0 +1,177 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
23
third_party/python/setuptools/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD
vendored
Normal file
@ -0,0 +1,23 @@
|
||||
Copyright (c) Donald Stufft and individual contributors.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
@ -56,7 +56,13 @@ def _get_uid(name):
|
||||
|
||||
|
||||
def make_tarball(
|
||||
base_name, base_dir, compress="gzip", verbose=0, dry_run=0, owner=None, group=None
|
||||
base_name,
|
||||
base_dir,
|
||||
compress="gzip",
|
||||
verbose=False,
|
||||
dry_run=False,
|
||||
owner=None,
|
||||
group=None,
|
||||
):
|
||||
"""Create a (possibly compressed) tar file from all the files under
|
||||
'base_dir'.
|
||||
@ -113,7 +119,7 @@ def make_tarball(
|
||||
return tarinfo
|
||||
|
||||
if not dry_run:
|
||||
tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
|
||||
tar = tarfile.open(archive_name, f'w|{tar_compression[compress]}')
|
||||
try:
|
||||
tar.add(base_dir, filter=_set_uid_gid)
|
||||
finally:
|
||||
@ -134,7 +140,7 @@ def make_tarball(
|
||||
return archive_name
|
||||
|
||||
|
||||
def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): # noqa: C901
|
||||
def make_zipfile(base_name, base_dir, verbose=False, dry_run=False): # noqa: C901
|
||||
"""Create a zip file from all the files under 'base_dir'.
|
||||
|
||||
The output zip file will be named 'base_name' + ".zip". Uses either the
|
||||
@ -160,12 +166,9 @@ def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): # noqa: C901
|
||||
# XXX really should distinguish between "couldn't find
|
||||
# external 'zip' command" and "zip failed".
|
||||
raise DistutilsExecError(
|
||||
(
|
||||
"unable to create zip file '%s': "
|
||||
"could neither import the 'zipfile' module nor "
|
||||
"find a standalone zip utility"
|
||||
)
|
||||
% zip_filename
|
||||
f"unable to create zip file '{zip_filename}': "
|
||||
"could neither import the 'zipfile' module nor "
|
||||
"find a standalone zip utility"
|
||||
)
|
||||
|
||||
else:
|
||||
@ -224,8 +227,8 @@ def make_archive(
|
||||
format,
|
||||
root_dir=None,
|
||||
base_dir=None,
|
||||
verbose=0,
|
||||
dry_run=0,
|
||||
verbose=False,
|
||||
dry_run=False,
|
||||
owner=None,
|
||||
group=None,
|
||||
):
|
||||
@ -260,11 +263,10 @@ def make_archive(
|
||||
try:
|
||||
format_info = ARCHIVE_FORMATS[format]
|
||||
except KeyError:
|
||||
raise ValueError("unknown archive format '%s'" % format)
|
||||
raise ValueError(f"unknown archive format '{format}'")
|
||||
|
||||
func = format_info[0]
|
||||
for arg, val in format_info[1]:
|
||||
kwargs[arg] = val
|
||||
kwargs.update(format_info[1])
|
||||
|
||||
if format != 'zip':
|
||||
kwargs['owner'] = owner
|
||||
|
@ -61,7 +61,7 @@ class BCPPCompiler(CCompiler):
|
||||
static_lib_format = shared_lib_format = '%s%s'
|
||||
exe_extension = '.exe'
|
||||
|
||||
def __init__(self, verbose=0, dry_run=0, force=0):
|
||||
def __init__(self, verbose=False, dry_run=False, force=False):
|
||||
super().__init__(verbose, dry_run, force)
|
||||
|
||||
# These executables are assumed to all be in the path.
|
||||
@ -84,13 +84,13 @@ class BCPPCompiler(CCompiler):
|
||||
|
||||
# -- Worker methods ------------------------------------------------
|
||||
|
||||
def compile( # noqa: C901
|
||||
def compile(
|
||||
self,
|
||||
sources,
|
||||
output_dir=None,
|
||||
macros=None,
|
||||
include_dirs=None,
|
||||
debug=0,
|
||||
debug=False,
|
||||
extra_preargs=None,
|
||||
extra_postargs=None,
|
||||
depends=None,
|
||||
@ -161,7 +161,7 @@ class BCPPCompiler(CCompiler):
|
||||
# compile ()
|
||||
|
||||
def create_static_lib(
|
||||
self, objects, output_libname, output_dir=None, debug=0, target_lang=None
|
||||
self, objects, output_libname, output_dir=None, debug=False, target_lang=None
|
||||
):
|
||||
(objects, output_dir) = self._fix_object_args(objects, output_dir)
|
||||
output_filename = self.library_filename(output_libname, output_dir=output_dir)
|
||||
@ -189,7 +189,7 @@ class BCPPCompiler(CCompiler):
|
||||
library_dirs=None,
|
||||
runtime_library_dirs=None,
|
||||
export_symbols=None,
|
||||
debug=0,
|
||||
debug=False,
|
||||
extra_preargs=None,
|
||||
extra_postargs=None,
|
||||
build_temp=None,
|
||||
@ -234,11 +234,10 @@ class BCPPCompiler(CCompiler):
|
||||
head, tail = os.path.split(output_filename)
|
||||
modname, ext = os.path.splitext(tail)
|
||||
temp_dir = os.path.dirname(objects[0]) # preserve tree structure
|
||||
def_file = os.path.join(temp_dir, '%s.def' % modname)
|
||||
def_file = os.path.join(temp_dir, f'{modname}.def')
|
||||
contents = ['EXPORTS']
|
||||
for sym in export_symbols or []:
|
||||
contents.append(f' {sym}=_{sym}')
|
||||
self.execute(write_file, (def_file, contents), "writing %s" % def_file)
|
||||
contents.extend(f' {sym}=_{sym}' for sym in export_symbols)
|
||||
self.execute(write_file, (def_file, contents), f"writing {def_file}")
|
||||
|
||||
# Borland C++ has problems with '/' in paths
|
||||
objects2 = map(os.path.normpath, objects)
|
||||
@ -254,7 +253,7 @@ class BCPPCompiler(CCompiler):
|
||||
objects.append(file)
|
||||
|
||||
for ell in library_dirs:
|
||||
ld_args.append("/L%s" % os.path.normpath(ell))
|
||||
ld_args.append(f"/L{os.path.normpath(ell)}")
|
||||
ld_args.append("/L.") # we sometimes use relative paths
|
||||
|
||||
# list of object files
|
||||
@ -313,7 +312,7 @@ class BCPPCompiler(CCompiler):
|
||||
|
||||
# -- Miscellaneous methods -----------------------------------------
|
||||
|
||||
def find_library_file(self, dirs, lib, debug=0):
|
||||
def find_library_file(self, dirs, lib, debug=False):
|
||||
# List of effective library names to try, in order of preference:
|
||||
# xxx_bcpp.lib is better than xxx.lib
|
||||
# and xxx_d.lib is better than xxx.lib if debug is set
|
||||
@ -339,7 +338,7 @@ class BCPPCompiler(CCompiler):
|
||||
return None
|
||||
|
||||
# overwrite the one from CCompiler to support rc and res-files
|
||||
def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
|
||||
def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
|
||||
if output_dir is None:
|
||||
output_dir = ''
|
||||
obj_names = []
|
||||
|
@ -6,6 +6,7 @@ for the Distutils compiler abstraction model."""
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import types
|
||||
import warnings
|
||||
|
||||
from ._itertools import always_iterable
|
||||
@ -21,7 +22,7 @@ from .errors import (
|
||||
)
|
||||
from .file_util import move_file
|
||||
from .spawn import spawn
|
||||
from .util import execute, split_quoted
|
||||
from .util import execute, is_mingw, split_quoted
|
||||
|
||||
|
||||
class CCompiler:
|
||||
@ -104,7 +105,7 @@ class CCompiler:
|
||||
library dirs specific to this compiler class
|
||||
"""
|
||||
|
||||
def __init__(self, verbose=0, dry_run=0, force=0):
|
||||
def __init__(self, verbose=False, dry_run=False, force=False):
|
||||
self.dry_run = dry_run
|
||||
self.force = force
|
||||
self.verbose = verbose
|
||||
@ -188,24 +189,28 @@ class CCompiler:
|
||||
return None
|
||||
|
||||
def _check_macro_definitions(self, definitions):
|
||||
"""Ensures that every element of 'definitions' is a valid macro
|
||||
definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do
|
||||
nothing if all definitions are OK, raise TypeError otherwise.
|
||||
"""
|
||||
"""Ensure that every element of 'definitions' is valid."""
|
||||
for defn in definitions:
|
||||
if not (
|
||||
isinstance(defn, tuple)
|
||||
and (
|
||||
len(defn) in (1, 2)
|
||||
and (isinstance(defn[1], str) or defn[1] is None)
|
||||
)
|
||||
and isinstance(defn[0], str)
|
||||
):
|
||||
raise TypeError(
|
||||
("invalid macro definition '%s': " % defn)
|
||||
+ "must be tuple (string,), (string, string), or "
|
||||
+ "(string, None)"
|
||||
)
|
||||
self._check_macro_definition(*defn)
|
||||
|
||||
def _check_macro_definition(self, defn):
|
||||
"""
|
||||
Raise a TypeError if defn is not valid.
|
||||
|
||||
A valid definition is either a (name, value) 2-tuple or a (name,) tuple.
|
||||
"""
|
||||
if not isinstance(defn, tuple) or not self._is_valid_macro(*defn):
|
||||
raise TypeError(
|
||||
f"invalid macro definition '{defn}': "
|
||||
"must be tuple (string,), (string, string), or (string, None)"
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _is_valid_macro(name, value=None):
|
||||
"""
|
||||
A valid macro is a ``name : str`` and a ``value : str | None``.
|
||||
"""
|
||||
return isinstance(name, str) and isinstance(value, (str, types.NoneType))
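
Concretely, given some CCompiler instance `compiler`, the macro tuples this accepts are the same ones later fed to `gen_preprocess_options` (names and values are illustrative):

    compiler._check_macro_definitions([
        ('TRACE',),             # (name,)        -> macro to undefine
        ('VERSION', '"1.2"'),   # (name, value)  -> define with a value
        ('WITH_SSL', None),     # (name, None)   -> define without a value
    ])
    # e.g. ('BAD', 1) or a bare string would raise TypeError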
|
||||
|
||||
# -- Bookkeeping methods -------------------------------------------
|
||||
|
||||
@ -342,7 +347,7 @@ class CCompiler:
|
||||
extra = []
|
||||
|
||||
# Get the list of expected output (object) files
|
||||
objects = self.object_filenames(sources, strip_dir=0, output_dir=outdir)
|
||||
objects = self.object_filenames(sources, strip_dir=False, output_dir=outdir)
|
||||
assert len(objects) == len(sources)
|
||||
|
||||
pp_opts = gen_preprocess_options(macros, incdirs)
|
||||
@ -532,7 +537,7 @@ class CCompiler:
|
||||
output_dir=None,
|
||||
macros=None,
|
||||
include_dirs=None,
|
||||
debug=0,
|
||||
debug=False,
|
||||
extra_preargs=None,
|
||||
extra_postargs=None,
|
||||
depends=None,
|
||||
@ -609,7 +614,7 @@ class CCompiler:
|
||||
pass
|
||||
|
||||
def create_static_lib(
|
||||
self, objects, output_libname, output_dir=None, debug=0, target_lang=None
|
||||
self, objects, output_libname, output_dir=None, debug=False, target_lang=None
|
||||
):
|
||||
"""Link a bunch of stuff together to create a static library file.
|
||||
The "bunch of stuff" consists of the list of object files supplied
|
||||
@ -650,7 +655,7 @@ class CCompiler:
|
||||
library_dirs=None,
|
||||
runtime_library_dirs=None,
|
||||
export_symbols=None,
|
||||
debug=0,
|
||||
debug=False,
|
||||
extra_preargs=None,
|
||||
extra_postargs=None,
|
||||
build_temp=None,
|
||||
@ -712,7 +717,7 @@ class CCompiler:
|
||||
library_dirs=None,
|
||||
runtime_library_dirs=None,
|
||||
export_symbols=None,
|
||||
debug=0,
|
||||
debug=False,
|
||||
extra_preargs=None,
|
||||
extra_postargs=None,
|
||||
build_temp=None,
|
||||
@ -743,7 +748,7 @@ class CCompiler:
|
||||
library_dirs=None,
|
||||
runtime_library_dirs=None,
|
||||
export_symbols=None,
|
||||
debug=0,
|
||||
debug=False,
|
||||
extra_preargs=None,
|
||||
extra_postargs=None,
|
||||
build_temp=None,
|
||||
@ -773,7 +778,7 @@ class CCompiler:
|
||||
libraries=None,
|
||||
library_dirs=None,
|
||||
runtime_library_dirs=None,
|
||||
debug=0,
|
||||
debug=False,
|
||||
extra_preargs=None,
|
||||
extra_postargs=None,
|
||||
target_lang=None,
|
||||
@ -859,7 +864,7 @@ class CCompiler:
|
||||
fd, fname = tempfile.mkstemp(".c", funcname, text=True)
|
||||
with os.fdopen(fd, "w", encoding='utf-8') as f:
|
||||
for incl in includes:
|
||||
f.write("""#include "%s"\n""" % incl)
|
||||
f.write(f"""#include "{incl}"\n""")
|
||||
if not includes:
|
||||
# Use "char func(void);" as the prototype to follow
|
||||
# what autoconf does. This prototype does not match
|
||||
@ -869,22 +874,20 @@ class CCompiler:
|
||||
# know the exact argument types, and the has_function
|
||||
# interface does not provide that level of information.
|
||||
f.write(
|
||||
"""\
|
||||
f"""\
|
||||
#ifdef __cplusplus
|
||||
extern "C"
|
||||
#endif
|
||||
char %s(void);
|
||||
char {funcname}(void);
|
||||
"""
|
||||
% funcname
|
||||
)
|
||||
f.write(
|
||||
"""\
|
||||
int main (int argc, char **argv) {
|
||||
%s();
|
||||
f"""\
|
||||
int main (int argc, char **argv) {{
|
||||
{funcname}();
|
||||
return 0;
|
||||
}
|
||||
}}
|
||||
"""
|
||||
% funcname
|
||||
)
|
||||
|
||||
try:
|
||||
@ -909,7 +912,7 @@ int main (int argc, char **argv) {
|
||||
os.remove(fn)
|
||||
return True
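
A usage sketch of the probe this builds — the function and library names are illustrative:

    from distutils.ccompiler import new_compiler

    compiler = new_compiler()
    # writes a tiny C file declaring `char clock_gettime(void);`, compiles it,
    # and returns True only if it also links against the requested libraries
    if compiler.has_function('clock_gettime', libraries=['rt']):
        ...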
|
||||
|
||||
def find_library_file(self, dirs, lib, debug=0):
|
||||
def find_library_file(self, dirs, lib, debug=False):
|
||||
"""Search the specified list of directories for a static or shared
|
||||
library file 'lib' and return the full path to that file. If
|
||||
'debug' true, look for a debugging version (if that makes sense on
|
||||
@ -952,7 +955,7 @@ int main (int argc, char **argv) {
|
||||
# * exe_extension -
|
||||
# extension for executable files, eg. '' or '.exe'
|
||||
|
||||
def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
|
||||
def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
|
||||
if output_dir is None:
|
||||
output_dir = ''
|
||||
return list(
|
||||
@ -987,13 +990,13 @@ int main (int argc, char **argv) {
|
||||
# If abs, chop off leading /
|
||||
return no_drive[os.path.isabs(no_drive) :]
|
||||
|
||||
def shared_object_filename(self, basename, strip_dir=0, output_dir=''):
|
||||
def shared_object_filename(self, basename, strip_dir=False, output_dir=''):
|
||||
assert output_dir is not None
|
||||
if strip_dir:
|
||||
basename = os.path.basename(basename)
|
||||
return os.path.join(output_dir, basename + self.shared_lib_extension)
|
||||
|
||||
def executable_filename(self, basename, strip_dir=0, output_dir=''):
|
||||
def executable_filename(self, basename, strip_dir=False, output_dir=''):
|
||||
assert output_dir is not None
|
||||
if strip_dir:
|
||||
basename = os.path.basename(basename)
|
||||
@ -1003,7 +1006,7 @@ int main (int argc, char **argv) {
|
||||
self,
|
||||
libname,
|
||||
lib_type='static',
|
||||
strip_dir=0,
|
||||
strip_dir=False,
|
||||
output_dir='', # or 'shared'
|
||||
):
|
||||
assert output_dir is not None
|
||||
@ -1032,7 +1035,7 @@ int main (int argc, char **argv) {
|
||||
print(msg)
|
||||
|
||||
def warn(self, msg):
|
||||
sys.stderr.write("warning: %s\n" % msg)
|
||||
sys.stderr.write(f"warning: {msg}\n")
|
||||
|
||||
def execute(self, func, args, msg=None, level=1):
|
||||
execute(func, args, msg, self.dry_run)
|
||||
@ -1077,6 +1080,10 @@ def get_default_compiler(osname=None, platform=None):
|
||||
osname = os.name
|
||||
if platform is None:
|
||||
platform = sys.platform
|
||||
# Mingw is a special case where sys.platform is 'win32' but we
|
||||
# want to use the 'mingw32' compiler, so check it first
|
||||
if is_mingw():
|
||||
return 'mingw32'
|
||||
for pattern, compiler in _default_compilers:
|
||||
if (
|
||||
re.match(pattern, platform) is not None
|
||||
@ -1117,15 +1124,15 @@ def show_compilers():
|
||||
# commands that use it.
|
||||
from distutils.fancy_getopt import FancyGetopt
|
||||
|
||||
compilers = []
|
||||
for compiler in compiler_class.keys():
|
||||
compilers.append(("compiler=" + compiler, None, compiler_class[compiler][2]))
|
||||
compilers.sort()
|
||||
compilers = sorted(
|
||||
("compiler=" + compiler, None, compiler_class[compiler][2])
|
||||
for compiler in compiler_class.keys()
|
||||
)
|
||||
pretty_printer = FancyGetopt(compilers)
|
||||
pretty_printer.print_help("List of available compilers:")
|
||||
|
||||
|
||||
def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
|
||||
def new_compiler(plat=None, compiler=None, verbose=False, dry_run=False, force=False):
|
||||
"""Generate an instance of some CCompiler subclass for the supplied
|
||||
platform/compiler combination. 'plat' defaults to 'os.name'
|
||||
(eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
|
||||
@ -1145,9 +1152,9 @@ def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
|
||||
|
||||
(module_name, class_name, long_description) = compiler_class[compiler]
|
||||
except KeyError:
|
||||
msg = "don't know how to compile C/C++ code on platform '%s'" % plat
|
||||
msg = f"don't know how to compile C/C++ code on platform '{plat}'"
|
||||
if compiler is not None:
|
||||
msg = msg + " with '%s' compiler" % compiler
|
||||
msg = msg + f" with '{compiler}' compiler"
|
||||
raise DistutilsPlatformError(msg)
|
||||
|
||||
try:
|
||||
@ -1157,7 +1164,7 @@ def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
|
||||
klass = vars(module)[class_name]
|
||||
except ImportError:
|
||||
raise DistutilsModuleError(
|
||||
"can't compile C/C++ code: unable to load module '%s'" % module_name
|
||||
f"can't compile C/C++ code: unable to load module '{module_name}'"
|
||||
)
|
||||
except KeyError:
|
||||
raise DistutilsModuleError(
|
||||
@ -1196,23 +1203,22 @@ def gen_preprocess_options(macros, include_dirs):
|
||||
for macro in macros:
|
||||
if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2):
|
||||
raise TypeError(
|
||||
"bad macro definition '%s': "
|
||||
"each element of 'macros' list must be a 1- or 2-tuple" % macro
|
||||
f"bad macro definition '{macro}': "
|
||||
"each element of 'macros' list must be a 1- or 2-tuple"
|
||||
)
|
||||
|
||||
if len(macro) == 1: # undefine this macro
|
||||
pp_opts.append("-U%s" % macro[0])
|
||||
pp_opts.append(f"-U{macro[0]}")
|
||||
elif len(macro) == 2:
|
||||
if macro[1] is None: # define with no explicit value
|
||||
pp_opts.append("-D%s" % macro[0])
|
||||
pp_opts.append(f"-D{macro[0]}")
|
||||
else:
|
||||
# XXX *don't* need to be clever about quoting the
|
||||
# macro value here, because we're going to avoid the
|
||||
# shell at all costs when we spawn the command!
|
||||
pp_opts.append("-D{}={}".format(*macro))
|
||||
|
||||
for dir in include_dirs:
|
||||
pp_opts.append("-I%s" % dir)
|
||||
pp_opts.extend(f"-I{dir}" for dir in include_dirs)
|
||||
return pp_opts
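
A quick sketch of the input/output shape (macro names and paths are illustrative):

    gen_preprocess_options(
        macros=[('NDEBUG', None), ('VERSION', '2'), ('TRACE',)],
        include_dirs=['include', '/usr/local/include'],
    )
    # -> ['-DNDEBUG', '-DVERSION=2', '-UTRACE', '-Iinclude', '-I/usr/local/include']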
|
||||
|
||||
|
||||
@ -1223,10 +1229,7 @@ def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
|
||||
directories. Returns a list of command-line options suitable for use
|
||||
with some compiler (depending on the two format strings passed in).
|
||||
"""
|
||||
lib_opts = []
|
||||
|
||||
for dir in library_dirs:
|
||||
lib_opts.append(compiler.library_dir_option(dir))
|
||||
lib_opts = [compiler.library_dir_option(dir) for dir in library_dirs]
|
||||
|
||||
for dir in runtime_library_dirs:
|
||||
lib_opts.extend(always_iterable(compiler.runtime_library_dir_option(dir)))
|
||||
@ -1245,7 +1248,7 @@ def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
|
||||
lib_opts.append(lib_file)
|
||||
else:
|
||||
compiler.warn(
|
||||
"no library file corresponding to '%s' found (skipping)" % lib
|
||||
f"no library file corresponding to '{lib}' found (skipping)"
|
||||
)
|
||||
else:
|
||||
lib_opts.append(compiler.library_option(lib))
|
||||
|
@ -87,13 +87,13 @@ class Command:
|
||||
|
||||
# The 'help' flag is just used for command-line parsing, so
|
||||
# none of that complicated bureaucracy is needed.
|
||||
self.help = 0
|
||||
self.help = False
|
||||
|
||||
# 'finalized' records whether or not 'finalize_options()' has been
|
||||
# called. 'finalize_options()' itself should not pay attention to
|
||||
# this flag: it is the business of 'ensure_finalized()', which
|
||||
# always calls 'finalize_options()', to respect/update it.
|
||||
self.finalized = 0
|
||||
self.finalized = False
|
||||
|
||||
# XXX A more explicit way to customize dry_run would be better.
|
||||
def __getattr__(self, attr):
|
||||
@ -109,7 +109,7 @@ class Command:
|
||||
def ensure_finalized(self):
|
||||
if not self.finalized:
|
||||
self.finalize_options()
|
||||
self.finalized = 1
|
||||
self.finalized = True
|
||||
|
||||
# Subclasses must define:
|
||||
# initialize_options()
|
||||
@ -135,7 +135,7 @@ class Command:
|
||||
This method must be implemented by all command classes.
|
||||
"""
|
||||
raise RuntimeError(
|
||||
"abstract method -- subclass %s must override" % self.__class__
|
||||
f"abstract method -- subclass {self.__class__} must override"
|
||||
)
|
||||
|
||||
def finalize_options(self):
|
||||
@ -150,14 +150,14 @@ class Command:
|
||||
This method must be implemented by all command classes.
|
||||
"""
|
||||
raise RuntimeError(
|
||||
"abstract method -- subclass %s must override" % self.__class__
|
||||
f"abstract method -- subclass {self.__class__} must override"
|
||||
)
|
||||
|
||||
def dump_options(self, header=None, indent=""):
|
||||
from distutils.fancy_getopt import longopt_xlate
|
||||
|
||||
if header is None:
|
||||
header = "command options for '%s':" % self.get_command_name()
|
||||
header = f"command options for '{self.get_command_name()}':"
|
||||
self.announce(indent + header, level=logging.INFO)
|
||||
indent = indent + " "
|
||||
for option, _, _ in self.user_options:
|
||||
@ -178,7 +178,7 @@ class Command:
|
||||
This method must be implemented by all command classes.
|
||||
"""
|
||||
raise RuntimeError(
|
||||
"abstract method -- subclass %s must override" % self.__class__
|
||||
f"abstract method -- subclass {self.__class__} must override"
|
||||
)
|
||||
|
||||
def announce(self, msg, level=logging.DEBUG):
|
||||
@ -293,7 +293,7 @@ class Command:
|
||||
if getattr(self, dst_option) is None:
|
||||
setattr(self, dst_option, getattr(src_cmd_obj, src_option))
|
||||
|
||||
def get_finalized_command(self, command, create=1):
|
||||
def get_finalized_command(self, command, create=True):
|
||||
"""Wrapper around Distribution's 'get_command_obj()' method: find
|
||||
(create if necessary and 'create' is true) the command object for
|
||||
'command', call its 'ensure_finalized()' method, and return the
|
||||
@ -305,7 +305,7 @@ class Command:
|
||||
|
||||
# XXX rename to 'get_reinitialized_command()'? (should do the
|
||||
# same in dist.py, if so)
|
||||
def reinitialize_command(self, command, reinit_subcommands=0):
|
||||
def reinitialize_command(self, command, reinit_subcommands=False):
|
||||
return self.distribution.reinitialize_command(command, reinit_subcommands)
|
||||
|
||||
def run_command(self, command):
|
||||
@ -340,7 +340,13 @@ class Command:
|
||||
dir_util.mkpath(name, mode, dry_run=self.dry_run)
|
||||
|
||||
def copy_file(
|
||||
self, infile, outfile, preserve_mode=1, preserve_times=1, link=None, level=1
|
||||
self,
|
||||
infile,
|
||||
outfile,
|
||||
preserve_mode=True,
|
||||
preserve_times=True,
|
||||
link=None,
|
||||
level=1,
|
||||
):
|
||||
"""Copy a file respecting verbose, dry-run and force flags. (The
|
||||
former two default to whatever is in the Distribution object, and
|
||||
@ -359,9 +365,9 @@ class Command:
|
||||
self,
|
||||
infile,
|
||||
outfile,
|
||||
preserve_mode=1,
|
||||
preserve_times=1,
|
||||
preserve_symlinks=0,
|
||||
preserve_mode=True,
|
||||
preserve_times=True,
|
||||
preserve_symlinks=False,
|
||||
level=1,
|
||||
):
|
||||
"""Copy an entire directory tree respecting verbose, dry-run,
|
||||
@ -381,7 +387,7 @@ class Command:
|
||||
"""Move a file respecting dry-run flag."""
|
||||
return file_util.move_file(src, dst, dry_run=self.dry_run)
|
||||
|
||||
def spawn(self, cmd, search_path=1, level=1):
|
||||
def spawn(self, cmd, search_path=True, level=1):
|
||||
"""Spawn an external command respecting dry-run flag."""
|
||||
from distutils.spawn import spawn
|
||||
|
||||
@ -412,7 +418,7 @@ class Command:
|
||||
timestamp checks.
|
||||
"""
|
||||
if skip_msg is None:
|
||||
skip_msg = "skipping %s (inputs unchanged)" % outfile
|
||||
skip_msg = f"skipping {outfile} (inputs unchanged)"
|
||||
|
||||
# Allow 'infiles' to be a single string
|
||||
if isinstance(infiles, str):
|
||||
|
@ -3,7 +3,7 @@
|
||||
Package containing implementation of all the standard Distutils
|
||||
commands."""
|
||||
|
||||
__all__ = [ # noqa: F822
|
||||
__all__ = [
|
||||
'build',
|
||||
'build_py',
|
||||
'build_ext',
|
||||
|
@ -15,9 +15,10 @@ def show_formats():
|
||||
"""Print list of available formats (arguments to "--format" option)."""
|
||||
from ..fancy_getopt import FancyGetopt
|
||||
|
||||
formats = []
|
||||
for format in bdist.format_commands:
|
||||
formats.append(("formats=" + format, None, bdist.format_commands[format][1]))
|
||||
formats = [
|
||||
("formats=" + format, None, bdist.format_commands[format][1])
|
||||
for format in bdist.format_commands
|
||||
]
|
||||
pretty_printer = FancyGetopt(formats)
|
||||
pretty_printer.print_help("List of available distribution formats:")
|
||||
|
||||
@ -41,7 +42,7 @@ class bdist(Command):
|
||||
'plat-name=',
|
||||
'p',
|
||||
"platform name to embed in generated filenames "
|
||||
"(default: %s)" % get_platform(),
|
||||
f"[default: {get_platform()}]",
|
||||
),
|
||||
('formats=', None, "formats for distribution (comma-separated list)"),
|
||||
(
|
||||
@ -94,7 +95,7 @@ class bdist(Command):
|
||||
self.plat_name = None
|
||||
self.formats = None
|
||||
self.dist_dir = None
|
||||
self.skip_build = 0
|
||||
self.skip_build = False
|
||||
self.group = None
|
||||
self.owner = None
|
||||
|
||||
@ -120,7 +121,7 @@ class bdist(Command):
|
||||
except KeyError:
|
||||
raise DistutilsPlatformError(
|
||||
"don't know how to create built distributions "
|
||||
"on platform %s" % os.name
|
||||
f"on platform {os.name}"
|
||||
)
|
||||
|
||||
if self.dist_dir is None:
|
||||
@ -133,7 +134,7 @@ class bdist(Command):
|
||||
try:
|
||||
commands.append(self.format_commands[format][0])
|
||||
except KeyError:
|
||||
raise DistutilsOptionError("invalid format '%s'" % format)
|
||||
raise DistutilsOptionError(f"invalid format '{format}'")
|
||||
|
||||
# Reinitialize and run each command.
|
||||
for i in range(len(self.formats)):
|
||||
@ -150,5 +151,5 @@ class bdist(Command):
|
||||
# If we're going to need to run this command again, tell it to
|
||||
# keep its temporary files around so subsequent runs go faster.
|
||||
if cmd_name in commands[i + 1 :]:
|
||||
sub_cmd.keep_temp = 1
|
||||
sub_cmd.keep_temp = True
|
||||
self.run_command(cmd_name)
|
||||
|
@@ -23,7 +23,7 @@ class bdist_dumb(Command):
'plat-name=',
'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform(),
f"[default: {get_platform()}]",
),
(
'format=',
@@ -33,15 +33,14 @@ class bdist_dumb(Command):
(
'keep-temp',
'k',
"keep the pseudo-installation tree around after "
+ "creating the distribution archive",
"keep the pseudo-installation tree around after creating the distribution archive",
),
('dist-dir=', 'd', "directory to put final built distributions in"),
('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
(
'relative',
None,
"build the archive using relative paths (default: false)",
"build the archive using relative paths [default: false]",
),
(
'owner=',
@@ -63,10 +62,10 @@ class bdist_dumb(Command):
self.bdist_dir = None
self.plat_name = None
self.format = None
self.keep_temp = 0
self.keep_temp = False
self.dist_dir = None
self.skip_build = None
self.relative = 0
self.relative = False
self.owner = None
self.group = None

@@ -81,7 +80,7 @@ class bdist_dumb(Command):
except KeyError:
raise DistutilsPlatformError(
"don't know how to create dumb built distributions "
"on platform %s" % os.name
f"on platform {os.name}"
)

self.set_undefined_options(
@@ -95,10 +94,10 @@ class bdist_dumb(Command):
if not self.skip_build:
self.run_command('build')

install = self.reinitialize_command('install', reinit_subcommands=1)
install = self.reinitialize_command('install', reinit_subcommands=True)
install.root = self.bdist_dir
install.skip_build = self.skip_build
install.warn_dir = 0
install.warn_dir = False

log.info("installing to %s", self.bdist_dir)
self.run_command('install')
@@ -116,7 +115,7 @@ class bdist_dumb(Command):
):
raise DistutilsPlatformError(
"can't make a dumb built distribution where "
f"base and platbase are different ({repr(install.install_base)}, {repr(install.install_platbase)})"
f"base and platbase are different ({install.install_base!r}, {install.install_platbase!r})"
)
else:
archive_root = os.path.join(
@@ -40,7 +40,7 @@ class bdist_rpm(Command):
'python=',
None,
"path to Python interpreter to hard-code in the .spec file "
"(default: \"python\")",
"[default: \"python\"]",
),
(
'fix-python',
@@ -187,13 +187,13 @@ class bdist_rpm(Command):
self.build_requires = None
self.obsoletes = None

self.keep_temp = 0
self.use_rpm_opt_flags = 1
self.rpm3_mode = 1
self.no_autoreq = 0
self.keep_temp = False
self.use_rpm_opt_flags = True
self.rpm3_mode = True
self.no_autoreq = False

self.force_arch = None
self.quiet = 0
self.quiet = False

def finalize_options(self):
self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
@@ -214,7 +214,7 @@ class bdist_rpm(Command):

if os.name != 'posix':
raise DistutilsPlatformError(
"don't know how to create RPM distributions on platform %s" % os.name
f"don't know how to create RPM distributions on platform {os.name}"
)
if self.binary_only and self.source_only:
raise DistutilsOptionError(
@@ -223,7 +223,7 @@ class bdist_rpm(Command):

# don't pass CFLAGS to pure python distributions
if not self.distribution.has_ext_modules():
self.use_rpm_opt_flags = 0
self.use_rpm_opt_flags = False

self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
self.finalize_package_data()
@@ -295,9 +295,9 @@ class bdist_rpm(Command):

# Spec file goes into 'dist_dir' if '--spec-only specified',
# build/rpm.<plat> otherwise.
spec_path = os.path.join(spec_dir, "%s.spec" % self.distribution.get_name())
spec_path = os.path.join(spec_dir, f"{self.distribution.get_name()}.spec")
self.execute(
write_file, (spec_path, self._make_spec_file()), "writing '%s'" % spec_path
write_file, (spec_path, self._make_spec_file()), f"writing '{spec_path}'"
)

if self.spec_only: # stop if requested
@@ -322,7 +322,7 @@ class bdist_rpm(Command):
if os.path.exists(self.icon):
self.copy_file(self.icon, source_dir)
else:
raise DistutilsFileError("icon file '%s' does not exist" % self.icon)
raise DistutilsFileError(f"icon file '{self.icon}' does not exist")

# build package
log.info("building RPMs")
@@ -334,9 +334,9 @@ class bdist_rpm(Command):
rpm_cmd.append('-bb')
else:
rpm_cmd.append('-ba')
rpm_cmd.extend(['--define', '__python %s' % self.python])
rpm_cmd.extend(['--define', f'__python {self.python}'])
if self.rpm3_mode:
rpm_cmd.extend(['--define', '_topdir %s' % os.path.abspath(self.rpm_base)])
rpm_cmd.extend(['--define', f'_topdir {os.path.abspath(self.rpm_base)}'])
if not self.keep_temp:
rpm_cmd.append('--clean')

@@ -370,7 +370,7 @@ class bdist_rpm(Command):

status = out.close()
if status:
raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))
raise DistutilsExecError(f"Failed to execute: {q_cmd!r}")

finally:
out.close()
@@ -426,7 +426,7 @@ class bdist_rpm(Command):
# normalizing the whitespace to simplify the test for whether the
# invocation of brp-python-bytecompile passes in __python):
vendor_hook = '\n'.join([
' %s \\' % line.strip() for line in vendor_hook.splitlines()
f' {line.strip()} \\' for line in vendor_hook.splitlines()
])
problem = "brp-python-bytecompile \\\n"
fixed = "brp-python-bytecompile %{__python} \\\n"
@@ -468,7 +468,7 @@ class bdist_rpm(Command):
if not self.distribution.has_ext_modules():
spec_file.append('BuildArch: noarch')
else:
spec_file.append('BuildArch: %s' % self.force_arch)
spec_file.append(f'BuildArch: {self.force_arch}')

for field in (
'Vendor',
@@ -518,7 +518,7 @@ class bdist_rpm(Command):
# rpm scripts
# figure out default build script
def_setup_call = f"{self.python} {os.path.basename(sys.argv[0])}"
def_build = "%s build" % def_setup_call
def_build = f"{def_setup_call} build"
if self.use_rpm_opt_flags:
def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build

@@ -528,9 +528,7 @@ class bdist_rpm(Command):
# that we open and interpolate into the spec file, but the defaults
# are just text that we drop in as-is. Hmmm.

install_cmd = (
'%s install -O1 --root=$RPM_BUILD_ROOT ' '--record=INSTALLED_FILES'
) % def_setup_call
install_cmd = f'{def_setup_call} install -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES'

script_options = [
('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
@@ -4,6 +4,7 @@ Implements the Distutils 'build' command."""

import os
import sys
import sysconfig

from ..core import Command
from ..errors import DistutilsOptionError
@@ -26,16 +27,14 @@ class build(Command):
(
'build-lib=',
None,
"build directory for all distribution (defaults to either "
+ "build-purelib or build-platlib",
"build directory for all distribution (defaults to either build-purelib or build-platlib",
),
('build-scripts=', None, "build directory for scripts"),
('build-temp=', 't', "temporary build directory"),
(
'plat-name=',
'p',
"platform name to build for, if supported "
"(default: %s)" % get_platform(),
f"platform name to build for, if supported [default: {get_platform()}]",
),
('compiler=', 'c', "specify the compiler type"),
('parallel=', 'j', "number of parallel build jobs"),
@@ -62,7 +61,7 @@ class build(Command):
self.compiler = None
self.plat_name = None
self.debug = None
self.force = 0
self.force = False
self.executable = None
self.parallel = None

@@ -81,6 +80,10 @@ class build(Command):

plat_specifier = f".{self.plat_name}-{sys.implementation.cache_tag}"

# Python 3.13+ with --disable-gil shouldn't share build directories
if sysconfig.get_config_var('Py_GIL_DISABLED'):
plat_specifier += 't'

# Make it so Python 2.x and Python 2.x with --with-pydebug don't
# share the same build directories. Doing so confuses the build
# process for C modules
@@ -57,7 +57,7 @@ class build_clib(Command):
self.define = None
self.undef = None
self.debug = None
self.force = 0
self.force = False
self.compiler = None

def finalize_options(self):
@@ -138,8 +138,8 @@ class build_clib(Command):

if '/' in name or (os.sep != '/' and os.sep in name):
raise DistutilsSetupError(
"bad library name '%s': "
"may not contain directory separators" % lib[0]
f"bad library name '{lib[0]}': "
"may not contain directory separators"
)

if not isinstance(build_info, dict):
@@ -166,9 +166,9 @@ class build_clib(Command):
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError(
"in 'libraries' option (library '%s'), "
f"in 'libraries' option (library '{lib_name}'), "
"'sources' must be present and must be "
"a list of source filenames" % lib_name
"a list of source filenames"
)

filenames.extend(sources)
@@ -179,9 +179,9 @@ class build_clib(Command):
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError(
"in 'libraries' option (library '%s'), "
f"in 'libraries' option (library '{lib_name}'), "
"'sources' must be present and must be "
"a list of source filenames" % lib_name
"a list of source filenames"
)
sources = list(sources)
@@ -23,7 +23,7 @@ from ..errors import (
)
from ..extension import Extension
from ..sysconfig import customize_compiler, get_config_h_filename, get_python_version
from ..util import get_platform
from ..util import get_platform, is_mingw

# An extension name is just a dot-separated list of Python NAMEs (ie.
# the same as a fully-qualified module name).
@@ -57,7 +57,7 @@ class build_ext(Command):
# takes care of both command-line and client options
# in between initialize_options() and finalize_options())

sep_by = " (separated by '%s')" % os.pathsep
sep_by = f" (separated by '{os.pathsep}')"
user_options = [
('build-lib=', 'b', "directory for compiled extension modules"),
('build-temp=', 't', "directory for temporary files (build by-products)"),
@@ -65,13 +65,13 @@ class build_ext(Command):
'plat-name=',
'p',
"platform name to cross-compile for, if supported "
"(default: %s)" % get_platform(),
f"[default: {get_platform()}]",
),
(
'inplace',
'i',
"ignore build-lib and put compiled extensions into the source "
+ "directory alongside your pure Python modules",
"directory alongside your pure Python modules",
),
(
'include-dirs=',
@@ -109,7 +109,7 @@ class build_ext(Command):
self.build_lib = None
self.plat_name = None
self.build_temp = None
self.inplace = 0
self.inplace = False
self.package = None

self.include_dirs = None
@@ -175,7 +175,7 @@ class build_ext(Command):
# Make sure Python's include directories (for Python.h, pyconfig.h,
# etc.) are in the include search path.
py_include = sysconfig.get_python_inc()
plat_py_include = sysconfig.get_python_inc(plat_specific=1)
plat_py_include = sysconfig.get_python_inc(plat_specific=True)
if self.include_dirs is None:
self.include_dirs = self.distribution.include_dirs or []
if isinstance(self.include_dirs, str):
@@ -212,7 +212,7 @@ class build_ext(Command):
# for extensions under windows use different directories
# for Release and Debug builds.
# also Python's library directory must be appended to library_dirs
if os.name == 'nt':
if os.name == 'nt' and not is_mingw():
# the 'libs' directory is for binary installs - we assume that
# must be the *native* platform. But we don't really support
# cross-compiling via a binary install anyway, so we let it go.
@@ -465,10 +465,7 @@ class build_ext(Command):
# And build the list of output (built) filenames. Note that this
# ignores the 'inplace' flag, and assumes everything goes in the
# "build" tree.
outputs = []
for ext in self.extensions:
outputs.append(self.get_ext_fullpath(ext.name))
return outputs
return [self.get_ext_fullpath(ext.name) for ext in self.extensions]

def build_extensions(self):
# First, sanity-check the 'extensions' list
@@ -517,9 +514,9 @@ class build_ext(Command):
sources = ext.sources
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError(
"in 'ext_modules' option (extension '%s'), "
f"in 'ext_modules' option (extension '{ext.name}'), "
"'sources' must be present and must be "
"a list of source filenames" % ext.name
"a list of source filenames"
)
# sort to make the resulting .so file build reproducible
sources = sorted(sources)
@@ -641,8 +638,7 @@ class build_ext(Command):

# Do not override commandline arguments
if not self.swig_opts:
for o in extension.swig_opts:
swig_cmd.append(o)
swig_cmd.extend(extension.swig_opts)

for source in swig_sources:
target = swig_targets[source]
@@ -663,7 +659,7 @@ class build_ext(Command):
# Windows (or so I presume!). If we find it there, great;
# if not, act like Unix and assume it's in the PATH.
for vers in ("1.3", "1.2", "1.1"):
fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
fn = os.path.join(f"c:\\swig{vers}", "swig.exe")
if os.path.isfile(fn):
return fn
else:
@@ -671,7 +667,7 @@ class build_ext(Command):
else:
raise DistutilsPlatformError(
"I don't know how to find (much less run) SWIG "
"on platform '%s'" % os.name
f"on platform '{os.name}'"
)

# -- Name generators -----------------------------------------------
@@ -754,7 +750,7 @@ class build_ext(Command):
# pyconfig.h that MSVC groks. The other Windows compilers all seem
# to need it mentioned explicitly, though, so that's what we do.
# Append '_d' to the python import library on debug builds.
if sys.platform == "win32":
if sys.platform == "win32" and not is_mingw():
from .._msvccompiler import MSVCCompiler

if not isinstance(self.compiler, MSVCCompiler):
@@ -784,7 +780,7 @@ class build_ext(Command):
# A native build on an Android device or on Cygwin
if hasattr(sys, 'getandroidapilevel'):
link_libpython = True
elif sys.platform == 'cygwin':
elif sys.platform == 'cygwin' or is_mingw():
link_libpython = True
elif '_PYTHON_HOST_PLATFORM' in os.environ:
# We are cross-compiling for one of the relevant platforms
@@ -38,7 +38,7 @@ class build_py(Command):
self.package = None
self.package_data = None
self.package_dir = None
self.compile = 0
self.compile = False
self.optimize = 0
self.force = None

@@ -95,7 +95,7 @@ class build_py(Command):
self.build_packages()
self.build_package_data()

self.byte_compile(self.get_outputs(include_bytecode=0))
self.byte_compile(self.get_outputs(include_bytecode=False))

def get_data_files(self):
"""Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
@@ -191,12 +191,12 @@ class build_py(Command):
if package_dir != "":
if not os.path.exists(package_dir):
raise DistutilsFileError(
"package directory '%s' does not exist" % package_dir
f"package directory '{package_dir}' does not exist"
)
if not os.path.isdir(package_dir):
raise DistutilsFileError(
"supposed package directory '%s' exists, "
"but is not a directory" % package_dir
f"supposed package directory '{package_dir}' exists, "
"but is not a directory"
)

# Directories without __init__.py are namespace packages (PEP 420).
@@ -228,7 +228,7 @@ class build_py(Command):
module = os.path.splitext(os.path.basename(f))[0]
modules.append((package, module, f))
else:
self.debug_print("excluding %s" % setup_script)
self.debug_print(f"excluding {setup_script}")
return modules

def find_modules(self):
@@ -264,7 +264,7 @@ class build_py(Command):
(package_dir, checked) = packages[package]
except KeyError:
package_dir = self.get_package_dir(package)
checked = 0
checked = False

if not checked:
init_py = self.check_package(package, package_dir)
@@ -306,7 +306,7 @@ class build_py(Command):
outfile_path = [build_dir] + list(package) + [module + ".py"]
return os.path.join(*outfile_path)

def get_outputs(self, include_bytecode=1):
def get_outputs(self, include_bytecode=True):
modules = self.find_all_modules()
outputs = []
for package, module, _module_file in modules:
@@ -347,7 +347,7 @@ class build_py(Command):
outfile = self.get_module_outfile(self.build_lib, package, module)
dir = os.path.dirname(outfile)
self.mkpath(dir)
return self.copy_file(module_file, outfile, preserve_mode=0)
return self.copy_file(module_file, outfile, preserve_mode=False)

def build_modules(self):
modules = self.find_modules()
@@ -96,7 +96,7 @@ class build_scripts(Command):
else:
first_line = f.readline()
if not first_line:
self.warn("%s is an empty file (skipping)" % script)
self.warn(f"{script} is an empty file (skipping)")
return

shebang_match = shebang_pattern.match(first_line)
@@ -21,7 +21,7 @@ with contextlib.suppress(ImportError):
report_level,
halt_level,
stream=None,
debug=0,
debug=False,
encoding='ascii',
error_handler='replace',
):
@@ -58,9 +58,9 @@ class check(Command):

def initialize_options(self):
"""Sets default values for options."""
self.restructuredtext = 0
self.restructuredtext = False
self.metadata = 1
self.strict = 0
self.strict = False
self._warnings = 0

def finalize_options(self):
@@ -100,13 +100,12 @@ class check(Command):
"""
metadata = self.distribution.metadata

missing = []
for attr in 'name', 'version':
if not getattr(metadata, attr, None):
missing.append(attr)
missing = [
attr for attr in ('name', 'version') if not getattr(metadata, attr, None)
]

if missing:
self.warn("missing required meta-data: %s" % ', '.join(missing))
self.warn("missing required meta-data: {}".format(', '.join(missing)))

def check_restructuredtext(self):
"""Checks if the long string fields are reST-compliant."""
@@ -147,7 +146,7 @@ class check(Command):
except AttributeError as e:
reporter.messages.append((
-1,
'Could not finish the parsing: %s.' % e,
f'Could not finish the parsing: {e}.',
'',
{},
))
@@ -14,17 +14,17 @@ from ..dir_util import remove_tree
class clean(Command):
description = "clean up temporary files from 'build' command"
user_options = [
('build-base=', 'b', "base build directory (default: 'build.build-base')"),
('build-base=', 'b', "base build directory [default: 'build.build-base']"),
(
'build-lib=',
None,
"build directory for all modules (default: 'build.build-lib')",
"build directory for all modules [default: 'build.build-lib']",
),
('build-temp=', 't', "temporary build directory (default: 'build.build-temp')"),
('build-temp=', 't', "temporary build directory [default: 'build.build-temp']"),
(
'build-scripts=',
None,
"build directory for scripts (default: 'build.build-scripts')",
"build directory for scripts [default: 'build.build-scripts']",
),
('bdist-base=', None, "temporary directory for built distributions"),
('all', 'a', "remove all build output, not just temporary by-products"),
@@ -94,7 +94,7 @@ class config(Command):

if not isinstance(self.compiler, CCompiler):
self.compiler = new_compiler(
compiler=self.compiler, dry_run=self.dry_run, force=1
compiler=self.compiler, dry_run=self.dry_run, force=True
)
customize_compiler(self.compiler)
if self.include_dirs:
@@ -109,7 +109,7 @@ class config(Command):
with open(filename, "w", encoding='utf-8') as file:
if headers:
for header in headers:
file.write("#include <%s>\n" % header)
file.write(f"#include <{header}>\n")
file.write("\n")
file.write(body)
if body[-1] != "\n":
@@ -126,7 +126,7 @@ class config(Command):
def _compile(self, body, headers, include_dirs, lang):
src = self._gen_temp_sourcefile(body, headers, lang)
if self.dump_source:
dump_file(src, "compiling '%s':" % src)
dump_file(src, f"compiling '{src}':")
(obj,) = self.compiler.object_filenames([src])
self.temp_files.extend([src, obj])
self.compiler.compile([src], include_dirs=include_dirs)
@@ -292,8 +292,8 @@ class config(Command):
include_dirs=None,
libraries=None,
library_dirs=None,
decl=0,
call=0,
decl=False,
call=False,
):
"""Determine if function 'func' is available by constructing a
source file that refers to 'func', and compiles and links it.
@@ -311,12 +311,12 @@ class config(Command):
self._check_compiler()
body = []
if decl:
body.append("int %s ();" % func)
body.append(f"int {func} ();")
body.append("int main () {")
if call:
body.append(" %s();" % func)
body.append(f" {func}();")
else:
body.append(" %s;" % func)
body.append(f" {func};")
body.append("}")
body = "\n".join(body) + "\n"
@@ -193,8 +193,7 @@ class install(Command):
(
'install-platbase=',
None,
"base installation directory for platform-specific files "
+ "(instead of --exec-prefix or --home)",
"base installation directory for platform-specific files (instead of --exec-prefix or --home)",
),
('root=', None, "install everything relative to this alternate root directory"),
# Or, explicitly set the installation scheme
@@ -211,8 +210,7 @@ class install(Command):
(
'install-lib=',
None,
"installation directory for all module distributions "
+ "(overrides --install-purelib and --install-platlib)",
"installation directory for all module distributions (overrides --install-purelib and --install-platlib)",
),
('install-headers=', None, "installation directory for C/C++ headers"),
('install-scripts=', None, "installation directory for Python scripts"),
@@ -245,7 +243,7 @@ class install(Command):
user_options.append((
'user',
None,
"install in user site-package '%s'" % USER_SITE,
f"install in user site-package '{USER_SITE}'",
))
boolean_options.append('user')

@@ -258,7 +256,7 @@ class install(Command):
self.prefix = None
self.exec_prefix = None
self.home = None
self.user = 0
self.user = False

# These select only the installation base; it's up to the user to
# specify the installation scheme (currently, that means supplying
@@ -293,7 +291,7 @@ class install(Command):
# 'install_path_file' is always true unless some outsider meddles
# with it.
self.extra_path = None
self.install_path_file = 1
self.install_path_file = True

# 'force' forces installation, even if target files are not
# out-of-date. 'skip_build' skips running the "build" command,
@@ -301,9 +299,9 @@ class install(Command):
# a user option, it's just there so the bdist_* commands can turn
# it off) determines whether we warn about installing to a
# directory not in sys.path.
self.force = 0
self.skip_build = 0
self.warn_dir = 1
self.force = False
self.skip_build = False
self.warn_dir = True

# These are only here as a conduit from the 'build' command to the
# 'install_*' commands that do the real work. ('build_base' isn't
@@ -348,8 +346,7 @@ class install(Command):
self.install_base or self.install_platbase
):
raise DistutilsOptionError(
"must supply either prefix/exec-prefix/home or "
+ "install-base/install-platbase -- not both"
"must supply either prefix/exec-prefix/home or install-base/install-platbase -- not both"
)

if self.home and (self.prefix or self.exec_prefix):
@@ -600,7 +597,7 @@ class install(Command):
self.select_scheme(os.name)
except KeyError:
raise DistutilsPlatformError(
"I don't know how to install stuff on '%s'" % os.name
f"I don't know how to install stuff on '{os.name}'"
)

def select_scheme(self, name):
@@ -683,9 +680,9 @@ class install(Command):
if not self.user:
return
home = convert_path(os.path.expanduser("~"))
for _name, path in self.config_vars.items():
for path in self.config_vars.values():
if str(path).startswith(home) and not os.path.isdir(path):
self.debug_print("os.makedirs('%s', 0o700)" % path)
self.debug_print(f"os.makedirs('{path}', 0o700)")
os.makedirs(path, 0o700)

# -- Command execution methods -------------------------------------
@@ -720,7 +717,7 @@ class install(Command):
self.execute(
write_file,
(self.record, outputs),
"writing list of installed files to '%s'" % self.record,
f"writing list of installed files to '{self.record}'",
)

sys_path = map(os.path.normpath, sys.path)
@@ -745,10 +742,10 @@ class install(Command):
filename = os.path.join(self.install_libbase, self.path_file + ".pth")
if self.install_path_file:
self.execute(
write_file, (filename, [self.extra_dirs]), "creating %s" % filename
write_file, (filename, [self.extra_dirs]), f"creating {filename}"
)
else:
self.warn("path file '%s' not created" % filename)
self.warn(f"path file '{filename}' not created")

# -- Reporting methods ---------------------------------------------
@@ -5,7 +5,11 @@ platform-independent data files."""

# contributed by Bastian Kleineidam

from __future__ import annotations

import functools
import os
from typing import Iterable

from ..core import Command
from ..util import change_root, convert_path
@@ -19,7 +23,7 @@ class install_data(Command):
'install-dir=',
'd',
"base directory for installing data files "
"(default: installation base dir)",
"[default: installation base dir]",
),
('root=', None, "install everything relative to this alternate root directory"),
('force', 'f', "force installation (overwrite existing files)"),
@@ -31,9 +35,9 @@ class install_data(Command):
self.install_dir = None
self.outfiles = []
self.root = None
self.force = 0
self.force = False
self.data_files = self.distribution.data_files
self.warn_dir = 1
self.warn_dir = True

def finalize_options(self):
self.set_undefined_options(
@@ -46,36 +50,42 @@ class install_data(Command):
def run(self):
self.mkpath(self.install_dir)
for f in self.data_files:
if isinstance(f, str):
# it's a simple file, so copy it
f = convert_path(f)
if self.warn_dir:
self.warn(
"setup script did not provide a directory for "
f"'{f}' -- installing right in '{self.install_dir}'"
)
(out, _) = self.copy_file(f, self.install_dir)
self.outfiles.append(out)
else:
# it's a tuple with path to install to and a list of files
dir = convert_path(f[0])
if not os.path.isabs(dir):
dir = os.path.join(self.install_dir, dir)
elif self.root:
dir = change_root(self.root, dir)
self.mkpath(dir)
self._copy(f)

if f[1] == []:
# If there are no files listed, the user must be
# trying to create an empty directory, so add the
# directory to the list of output files.
self.outfiles.append(dir)
else:
# Copy files, adding them to the list of output files.
for data in f[1]:
data = convert_path(data)
(out, _) = self.copy_file(data, dir)
self.outfiles.append(out)
@functools.singledispatchmethod
def _copy(self, f: tuple[str | os.PathLike, Iterable[str | os.PathLike]]):
# it's a tuple with path to install to and a list of files
dir = convert_path(f[0])
if not os.path.isabs(dir):
dir = os.path.join(self.install_dir, dir)
elif self.root:
dir = change_root(self.root, dir)
self.mkpath(dir)

if f[1] == []:
# If there are no files listed, the user must be
# trying to create an empty directory, so add the
# directory to the list of output files.
self.outfiles.append(dir)
else:
# Copy files, adding them to the list of output files.
for data in f[1]:
data = convert_path(data)
(out, _) = self.copy_file(data, dir)
self.outfiles.append(out)

@_copy.register(str)
@_copy.register(os.PathLike)
def _(self, f: str | os.PathLike):
# it's a simple file, so copy it
f = convert_path(f)
if self.warn_dir:
self.warn(
"setup script did not provide a directory for "
f"'{f}' -- installing right in '{self.install_dir}'"
)
(out, _) = self.copy_file(f, self.install_dir)
self.outfiles.append(out)

def get_inputs(self):
return self.data_files or []
@@ -19,7 +19,7 @@ class install_headers(Command):

def initialize_options(self):
self.install_dir = None
self.force = 0
self.force = False
self.outfiles = []

def finalize_options(self):
@@ -54,7 +54,7 @@ class install_lib(Command):
# let the 'install' command dictate our installation directory
self.install_dir = None
self.build_dir = None
self.force = 0
self.force = False
self.compile = None
self.optimize = None
self.skip_build = None
@@ -81,9 +81,9 @@ class install_lib(Command):
if not isinstance(self.optimize, int):
try:
self.optimize = int(self.optimize)
if self.optimize not in (0, 1, 2):
raise AssertionError
except (ValueError, AssertionError):
except ValueError:
pass
if self.optimize not in (0, 1, 2):
raise DistutilsOptionError("optimize must be 0, 1, or 2")

def run(self):
@@ -114,7 +114,7 @@ class install_lib(Command):
outfiles = self.copy_tree(self.build_dir, self.install_dir)
else:
self.warn(
"'%s' does not exist -- no Python modules to install" % self.build_dir
f"'{self.build_dir}' does not exist -- no Python modules to install"
)
return
return outfiles
@@ -161,9 +161,7 @@ class install_lib(Command):
build_dir = getattr(build_cmd, cmd_option)

prefix_len = len(build_dir) + len(os.sep)
outputs = []
for file in build_files:
outputs.append(os.path.join(output_dir, file[prefix_len:]))
outputs = [os.path.join(output_dir, file[prefix_len:]) for file in build_files]

return outputs
@@ -26,7 +26,7 @@ class install_scripts(Command):

def initialize_options(self):
self.install_dir = None
self.force = 0
self.force = False
self.build_dir = None
self.skip_build = None
@@ -37,8 +37,8 @@ class register(PyPIRCCommand):

def initialize_options(self):
PyPIRCCommand.initialize_options(self)
self.list_classifiers = 0
self.strict = 0
self.list_classifiers = False
self.strict = False

def finalize_options(self):
PyPIRCCommand.finalize_options(self)
@@ -74,7 +74,7 @@ class register(PyPIRCCommand):
check = self.distribution.get_command_obj('check')
check.ensure_finalized()
check.strict = self.strict
check.restructuredtext = 1
check.restructuredtext = True
check.run()

def _set_config(self):
@@ -88,7 +88,7 @@ class register(PyPIRCCommand):
self.has_config = True
else:
if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
raise ValueError('%s not found in .pypirc' % self.repository)
raise ValueError(f'{self.repository} not found in .pypirc')
if self.repository == 'pypi':
self.repository = self.DEFAULT_REPOSITORY
self.has_config = False
@@ -192,7 +192,7 @@ Your selection [default 1]: """,
logging.INFO,
)
self.announce(
'(the login will be stored in %s)' % self._get_rc_file(),
f'(the login will be stored in {self._get_rc_file()})',
logging.INFO,
)
choice = 'X'
@@ -225,7 +225,7 @@ Your selection [default 1]: """,
log.info('Server response (%s): %s', code, result)
else:
log.info('You will receive an email shortly.')
log.info('Follow the instructions in it to ' 'complete registration.')
log.info('Follow the instructions in it to complete registration.')
elif choice == '3':
data = {':action': 'password_reset'}
data['email'] = ''
@@ -277,7 +277,7 @@ Your selection [default 1]: """,
for key, values in data.items():
for value in map(str, make_iterable(values)):
body.write(sep_boundary)
body.write('\nContent-Disposition: form-data; name="%s"' % key)
body.write(f'\nContent-Disposition: form-data; name="{key}"')
body.write("\n\n")
body.write(value)
if value and value[-1] == '\r':
@@ -288,8 +288,7 @@ Your selection [default 1]: """,

# build the Request
headers = {
'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'
% boundary,
'Content-type': f'multipart/form-data; boundary={boundary}; charset=utf-8',
'Content-length': str(len(body)),
}
req = urllib.request.Request(self.repository, body, headers)
@@ -24,10 +24,10 @@ def show_formats():
from ..archive_util import ARCHIVE_FORMATS
from ..fancy_getopt import FancyGetopt

formats = []
for format in ARCHIVE_FORMATS.keys():
formats.append(("formats=" + format, None, ARCHIVE_FORMATS[format][2]))
formats.sort()
formats = sorted(
("formats=" + format, None, ARCHIVE_FORMATS[format][2])
for format in ARCHIVE_FORMATS.keys()
)
FancyGetopt(formats).print_help("List of available source distribution formats:")


@@ -125,14 +125,14 @@ class sdist(Command):

# 'use_defaults': if true, we will include the default file set
# in the manifest
self.use_defaults = 1
self.prune = 1
self.use_defaults = True
self.prune = True

self.manifest_only = 0
self.force_manifest = 0
self.manifest_only = False
self.force_manifest = False

self.formats = ['gztar']
self.keep_temp = 0
self.keep_temp = False
self.dist_dir = None

self.archive_files = None
@@ -150,7 +150,7 @@ class sdist(Command):

bad_format = archive_util.check_archive_formats(self.formats)
if bad_format:
raise DistutilsOptionError("unknown archive format '%s'" % bad_format)
raise DistutilsOptionError(f"unknown archive format '{bad_format}'")

if self.dist_dir is None:
self.dist_dir = "dist"
@@ -288,7 +288,7 @@ class sdist(Command):
if self._cs_path_exists(fn):
self.filelist.append(fn)
else:
self.warn("standard file '%s' not found" % fn)
self.warn(f"standard file '{fn}' not found")

def _add_defaults_optional(self):
optional = ['tests/test*.py', 'test/test*.py', 'setup.cfg']
@@ -353,12 +353,12 @@ class sdist(Command):
log.info("reading manifest template '%s'", self.template)
template = TextFile(
self.template,
strip_comments=1,
skip_blanks=1,
join_lines=1,
lstrip_ws=1,
rstrip_ws=1,
collapse_join=1,
strip_comments=True,
skip_blanks=True,
join_lines=True,
lstrip_ws=True,
rstrip_ws=True,
collapse_join=True,
)

try:
@@ -401,7 +401,7 @@ class sdist(Command):

vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', '_darcs']
vcs_ptrn = r'(^|{})({})({}).*'.format(seps, '|'.join(vcs_dirs), seps)
self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
self.filelist.exclude_pattern(vcs_ptrn, is_regex=True)

def write_manifest(self):
"""Write the file list in 'self.filelist' (presumably as filled in
@@ -410,8 +410,7 @@ class sdist(Command):
"""
if self._manifest_is_not_generated():
log.info(
"not writing to manually maintained "
"manifest file '%s'" % self.manifest
f"not writing to manually maintained manifest file '{self.manifest}'"
)
return

@@ -420,7 +419,7 @@ class sdist(Command):
self.execute(
file_util.write_file,
(self.manifest, content),
"writing manifest file '%s'" % self.manifest,
f"writing manifest file '{self.manifest}'",
)

def _manifest_is_not_generated(self):
@@ -468,10 +467,10 @@ class sdist(Command):

if hasattr(os, 'link'): # can make hard links on this system
link = 'hard'
msg = "making hard links in %s..." % base_dir
msg = f"making hard links in {base_dir}..."
else: # nope, have to copy
link = None
msg = "copying files to %s..." % base_dir
msg = f"copying files to {base_dir}..."

if not files:
log.warning("no files to distribute -- empty manifest?")
@@ -41,7 +41,7 @@ class upload(PyPIRCCommand):
PyPIRCCommand.initialize_options(self)
self.username = ''
self.password = ''
self.show_response = 0
self.show_response = False
self.sign = False
self.identity = None

@@ -75,7 +75,7 @@ class upload(PyPIRCCommand):
# Makes sure the repository URL is compliant
schema, netloc, url, params, query, fragments = urlparse(self.repository)
if params or query or fragments:
raise AssertionError("Incompatible url %s" % self.repository)
raise AssertionError(f"Incompatible url {self.repository}")

if schema not in ('http', 'https'):
raise AssertionError("unsupported schema " + schema)
@@ -153,10 +153,10 @@ class upload(PyPIRCCommand):
end_boundary = sep_boundary + b'--\r\n'
body = io.BytesIO()
for key, values in data.items():
title = '\r\nContent-Disposition: form-data; name="%s"' % key
title = f'\r\nContent-Disposition: form-data; name="{key}"'
for value in make_iterable(values):
if type(value) is tuple:
title += '; filename="%s"' % value[0]
title += f'; filename="{value[0]}"'
value = value[1]
else:
value = str(value).encode('utf-8')
@@ -172,7 +172,7 @@ class upload(PyPIRCCommand):

# build the Request
headers = {
'Content-type': 'multipart/form-data; boundary=%s' % boundary,
'Content-type': f'multipart/form-data; boundary={boundary}',
'Content-length': str(len(body)),
'Authorization': auth,
}
@@ -3,11 +3,11 @@ from __future__ import annotations
from .py38 import removeprefix


def consolidate_linker_args(args: list[str]) -> str:
def consolidate_linker_args(args: list[str]) -> list[str] | str:
"""
Ensure the return value is a string for backward compatibility.

Retain until at least 2024-04-31. See pypa/distutils#246
Retain until at least 2025-04-31. See pypa/distutils#246
"""

if not all(arg.startswith('-Wl,') for arg in args):
@@ -14,6 +14,7 @@ if sys.version_info < (3, 9):
return self[len(prefix) :]
else:
return self[:]

else:

def removesuffix(self, suffix):
@@ -21,3 +22,13 @@ else:

def removeprefix(self, prefix):
return self.removeprefix(prefix)


def aix_platform(osname, version, release):
try:
import _aix_support # type: ignore

return _aix_support.aix_platform()
except ImportError:
pass
return f"{osname}-{version}.{release}"
@@ -30,7 +30,7 @@ class PyPIRCCommand(Command):
realm = None

user_options = [
('repository=', 'r', "url of repository [default: %s]" % DEFAULT_REPOSITORY),
('repository=', 'r', f"url of repository [default: {DEFAULT_REPOSITORY}]"),
('show-response', None, 'display full response text from server'),
]

@@ -51,7 +51,7 @@ class PyPIRCCommand(Command):
"""Reads the .pypirc file."""
rc = self._get_rc_file()
if os.path.exists(rc):
self.announce('Using PyPI login from %s' % rc)
self.announce(f'Using PyPI login from {rc}')
repository = self.repository or self.DEFAULT_REPOSITORY

config = RawConfigParser()
@@ -129,7 +129,7 @@ class PyPIRCCommand(Command):
"""Initialize options."""
self.repository = None
self.realm = None
self.show_response = 0
self.show_response = False

def finalize_options(self):
"""Finalizes options."""
@@ -146,7 +146,7 @@ def setup(**attrs): # noqa: C901
_setup_distribution = dist = klass(attrs)
except DistutilsSetupError as msg:
if 'name' not in attrs:
raise SystemExit("error in setup command: %s" % msg)
raise SystemExit(f"error in setup command: {msg}")
else:
raise SystemExit("error in {} setup command: {}".format(attrs['name'], msg))

@@ -170,7 +170,7 @@ def setup(**attrs): # noqa: C901
try:
ok = dist.parse_command_line()
except DistutilsArgError as msg:
raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg)
raise SystemExit(gen_usage(dist.script_name) + f"\nerror: {msg}")

if DEBUG:
print("options (after parsing command line):")
@@ -274,11 +274,8 @@ def run_setup(script_name, script_args=None, stop_after="run"):

if _setup_distribution is None:
raise RuntimeError(
(
"'distutils.core.setup()' was never called -- "
"perhaps '%s' is not a Distutils setup script?"
)
% script_name
"'distutils.core.setup()' was never called -- "
f"perhaps '{script_name}' is not a Distutils setup script?"
)

# I wonder if the setup script's namespace -- g and l -- would be of
@@ -9,13 +9,11 @@ cygwin in no-cygwin mode).
import copy
import os
import pathlib
import re
import shlex
import sys
import warnings
from subprocess import check_output

from ._collections import RangeMap
from .errors import (
CCompilerError,
CompileError,
@@ -26,42 +24,10 @@ from .file_util import write_file
from .unixccompiler import UnixCCompiler
from .version import LooseVersion, suppress_known_deprecation

_msvcr_lookup = RangeMap.left(
{
# MSVC 7.0
1300: ['msvcr70'],
# MSVC 7.1
1310: ['msvcr71'],
# VS2005 / MSVC 8.0
1400: ['msvcr80'],
# VS2008 / MSVC 9.0
1500: ['msvcr90'],
# VS2010 / MSVC 10.0
1600: ['msvcr100'],
# VS2012 / MSVC 11.0
1700: ['msvcr110'],
# VS2013 / MSVC 12.0
1800: ['msvcr120'],
# VS2015 / MSVC 14.0
1900: ['vcruntime140'],
2000: RangeMap.undefined_value,
},
)


def get_msvcr():
"""Include the appropriate MSVC runtime library if Python was built
with MSVC 7.0 or later.
"""
match = re.search(r'MSC v\.(\d{4})', sys.version)
try:
msc_ver = int(match.group(1))
except AttributeError:
return
try:
return _msvcr_lookup[msc_ver]
except KeyError:
raise ValueError("Unknown MS Compiler version %s " % msc_ver)
"""No longer needed, but kept for backward compatibility."""
return []


_runtime_library_dirs_msg = (
@@ -83,7 +49,7 @@ class CygwinCCompiler(UnixCCompiler):
dylib_lib_format = "cyg%s%s"
exe_extension = ".exe"

def __init__(self, verbose=0, dry_run=0, force=0):
def __init__(self, verbose=False, dry_run=False, force=False):
super().__init__(verbose, dry_run, force)

status, details = check_config_h()
@@ -91,26 +57,28 @@ class CygwinCCompiler(UnixCCompiler):
if status is not CONFIG_H_OK:
self.warn(
"Python's pyconfig.h doesn't seem to support your compiler. "
"Reason: %s. "
"Compiling may fail because of undefined preprocessor macros." % details
f"Reason: {details}. "
"Compiling may fail because of undefined preprocessor macros."
)

self.cc = os.environ.get('CC', 'gcc')
self.cxx = os.environ.get('CXX', 'g++')

self.linker_dll = self.cc
self.linker_dll_cxx = self.cxx
shared_option = "-shared"

self.set_executables(
compiler='%s -mcygwin -O -Wall' % self.cc,
compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc,
compiler_cxx='%s -mcygwin -O -Wall' % self.cxx,
linker_exe='%s -mcygwin' % self.cc,
linker_so=(f'{self.linker_dll} -mcygwin {shared_option}'),
compiler=f'{self.cc} -mcygwin -O -Wall',
compiler_so=f'{self.cc} -mcygwin -mdll -O -Wall',
compiler_cxx=f'{self.cxx} -mcygwin -O -Wall',
compiler_so_cxx=f'{self.cxx} -mcygwin -mdll -O -Wall',
linker_exe=f'{self.cc} -mcygwin',
linker_so=f'{self.linker_dll} -mcygwin {shared_option}',
linker_exe_cxx=f'{self.cxx} -mcygwin',
linker_so_cxx=f'{self.linker_dll_cxx} -mcygwin {shared_option}',
)

# Include the appropriate MSVC runtime library if Python was built
# with MSVC 7.0 or later.
self.dll_libraries = get_msvcr()

@property
@@ -138,9 +106,17 @@ class CygwinCCompiler(UnixCCompiler):
raise CompileError(msg)
else: # for other files use the C-compiler
try:
self.spawn(
self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs
)
if self.detect_language(src) == 'c++':
self.spawn(
self.compiler_so_cxx
+ cc_args
+ [src, '-o', obj]
+ extra_postargs
)
else:
self.spawn(
self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs
)
except DistutilsExecError as msg:
raise CompileError(msg)

@@ -154,7 +130,7 @@ class CygwinCCompiler(UnixCCompiler):
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
debug=0,
debug=False,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
@@ -195,10 +171,9 @@ class CygwinCCompiler(UnixCCompiler):
def_file = os.path.join(temp_dir, dll_name + ".def")

# Generate .def file
contents = ["LIBRARY %s" % os.path.basename(output_filename), "EXPORTS"]
for sym in export_symbols:
contents.append(sym)
self.execute(write_file, (def_file, contents), "writing %s" % def_file)
contents = [f"LIBRARY {os.path.basename(output_filename)}", "EXPORTS"]
contents.extend(export_symbols)
self.execute(write_file, (def_file, contents), f"writing {def_file}")

# next add options for def-file

@@ -265,7 +240,7 @@ class Mingw32CCompiler(CygwinCCompiler):

compiler_type = 'mingw32'

def __init__(self, verbose=0, dry_run=0, force=0):
def __init__(self, verbose=False, dry_run=False, force=False):
super().__init__(verbose, dry_run, force)

shared_option = "-shared"
@@ -274,11 +249,14 @@ class Mingw32CCompiler(CygwinCCompiler):
raise CCompilerError('Cygwin gcc cannot be used with --compiler=mingw32')

self.set_executables(
compiler='%s -O -Wall' % self.cc,
compiler_so='%s -mdll -O -Wall' % self.cc,
compiler_cxx='%s -O -Wall' % self.cxx,
linker_exe='%s' % self.cc,
compiler=f'{self.cc} -O -Wall',
compiler_so=f'{self.cc} -shared -O -Wall',
compiler_so_cxx=f'{self.cxx} -shared -O -Wall',
compiler_cxx=f'{self.cxx} -O -Wall',
linker_exe=f'{self.cc}',
linker_so=f'{self.linker_dll} {shared_option}',
linker_exe_cxx=f'{self.cxx}',
linker_so_cxx=f'{self.linker_dll_cxx} {shared_option}',
)

def runtime_library_dir_option(self, dir):
@@ -330,6 +308,9 @@ def check_config_h():
fn = sysconfig.get_config_h_filename()
try:
config_h = pathlib.Path(fn).read_text(encoding='utf-8')
except OSError as exc:
return (CONFIG_H_UNCERTAIN, f"couldn't read '{fn}': {exc.strerror}")
else:
substring = '__GNUC__'
if substring in config_h:
code = CONFIG_H_OK
@@ -338,8 +319,6 @@ def check_config_h():
code = CONFIG_H_NOTOK
mention_inflected = 'does not mention'
return code, f"{fn!r} {mention_inflected} {substring!r}"
except OSError as exc:
return (CONFIG_H_UNCERTAIN, f"couldn't read '{fn}': {exc.strerror}")


def is_cygwincc(cc):
@@ -10,10 +10,10 @@ from .errors import DistutilsFileError, DistutilsInternalError

# cache for by mkpath() -- in addition to cheapening redundant calls,
# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
_path_created = {}
_path_created = set()


def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901
def mkpath(name, mode=0o777, verbose=True, dry_run=False): # noqa: C901
"""Create a directory and any missing ancestor directories.

If the directory already exists (or if 'name' is the empty string, which
@@ -45,7 +45,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901
created_dirs = []
if os.path.isdir(name) or name == '':
return created_dirs
if _path_created.get(os.path.abspath(name)):
if os.path.abspath(name) in _path_created:
return created_dirs

(head, tail) = os.path.split(name)
@@ -63,7 +63,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901
head = os.path.join(head, d)
abs_head = os.path.abspath(head)

if _path_created.get(abs_head):
if abs_head in _path_created:
continue

if verbose >= 1:
@@ -79,11 +79,11 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901
)
created_dirs.append(head)

_path_created[abs_head] = 1
_path_created.add(abs_head)
return created_dirs


def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
def create_tree(base_dir, files, mode=0o777, verbose=True, dry_run=False):
"""Create all the empty directories under 'base_dir' needed to put 'files'
there.

@@ -104,12 +104,12 @@ def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
def copy_tree( # noqa: C901
src,
dst,
preserve_mode=1,
preserve_times=1,
preserve_symlinks=0,
update=0,
verbose=1,
dry_run=0,
preserve_mode=True,
preserve_times=True,
preserve_symlinks=False,
update=False,
verbose=True,
dry_run=False,
):
"""Copy an entire directory tree 'src' to a new location 'dst'.

@@ -133,7 +133,7 @@ def copy_tree( # noqa: C901
from distutils.file_util import copy_file

if not dry_run and not os.path.isdir(src):
raise DistutilsFileError("cannot copy tree '%s': not a directory" % src)
raise DistutilsFileError(f"cannot copy tree '{src}': not a directory")
try:
names = os.listdir(src)
except OSError as e:
@@ -202,7 +202,7 @@ def _build_cmdtuple(path, cmdtuples):
cmdtuples.append((os.rmdir, path))


def remove_tree(directory, verbose=1, dry_run=0):
def remove_tree(directory, verbose=True, dry_run=False):
"""Recursively remove an entire directory tree.

Any errors are ignored (apart from being reported to stdout if 'verbose'
@@ -222,7 +222,7 @@ def remove_tree(directory, verbose=1, dry_run=0):
# remove dir from cache if it's already there
abspath = os.path.abspath(cmd[1])
if abspath in _path_created:
_path_created.pop(abspath)
_path_created.remove(abspath)
except OSError as exc:
log.warning("error removing %s: %s", directory, exc)
|
||||
import pathlib
|
||||
import re
|
||||
import sys
|
||||
import warnings
|
||||
from collections.abc import Iterable
|
||||
from email import message_from_file
|
||||
|
||||
try:
|
||||
import warnings
|
||||
except ImportError:
|
||||
warnings = None
|
||||
|
||||
from ._log import log
|
||||
from ._vendor.packaging.utils import canonicalize_name, canonicalize_version
|
||||
from .debug import DEBUG
|
||||
from .errors import (
|
||||
DistutilsArgError,
|
||||
@ -137,9 +134,9 @@ Common commands: (see '--help-commands' for more)
|
||||
"""
|
||||
|
||||
# Default values for our command-line options
|
||||
self.verbose = 1
|
||||
self.dry_run = 0
|
||||
self.help = 0
|
||||
self.verbose = True
|
||||
self.dry_run = False
|
||||
self.help = False
|
||||
for attr in self.display_option_names:
|
||||
setattr(self, attr, 0)
|
||||
|
||||
@ -247,10 +244,7 @@ Common commands: (see '--help-commands' for more)
attrs['license'] = attrs['licence']
del attrs['licence']
msg = "'licence' distribution option is deprecated; use 'license'"
if warnings is not None:
warnings.warn(msg)
else:
sys.stderr.write(msg + "\n")
warnings.warn(msg)

# Now work on the rest of the attributes. Any attribute that's
# not already defined is invalid!
@ -262,7 +256,7 @@ Common commands: (see '--help-commands' for more)
elif hasattr(self, key):
setattr(self, key, val)
else:
msg = "Unknown distribution option: %s" % repr(key)
msg = f"Unknown distribution option: {key!r}"
warnings.warn(msg)

# no-user-cfg is handled before other command line args
@ -311,9 +305,9 @@ Common commands: (see '--help-commands' for more)
for cmd_name in commands:
opt_dict = self.command_options.get(cmd_name)
if opt_dict is None:
self.announce(indent + "no option dict for '%s' command" % cmd_name)
self.announce(indent + f"no option dict for '{cmd_name}' command")
else:
self.announce(indent + "option dict for '%s' command:" % cmd_name)
self.announce(indent + f"option dict for '{cmd_name}' command:")
out = pformat(opt_dict)
for line in out.split('\n'):
self.announce(indent + " " + line)
@ -339,7 +333,7 @@ Common commands: (see '--help-commands' for more)
files = [str(path) for path in self._gen_paths() if os.path.isfile(path)]

if DEBUG:
self.announce("using config files: %s" % ', '.join(files))
self.announce("using config files: {}".format(', '.join(files)))

return files

@ -352,7 +346,8 @@ Common commands: (see '--help-commands' for more)
prefix = '.' * (os.name == 'posix')
filename = prefix + 'pydistutils.cfg'
if self.want_user_cfg:
yield pathlib.Path('~').expanduser() / filename
with contextlib.suppress(RuntimeError):
yield pathlib.Path('~').expanduser() / filename

# All platforms support local setup.cfg
yield pathlib.Path('setup.cfg')
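The `contextlib.suppress(RuntimeError)` guard added above matters because `pathlib.Path('~').expanduser()` raises `RuntimeError` when no home directory can be resolved; in that case the per-user config file is simply skipped rather than crashing config discovery. A standalone sketch of the same pattern (the generator name `candidate_config_paths` is illustrative only):

import contextlib
import pathlib

def candidate_config_paths(filename="pydistutils.cfg"):
    """Yield config paths, silently skipping the user file if the home dir is unknowable."""
    with contextlib.suppress(RuntimeError):
        # expanduser() raises RuntimeError when the home directory cannot be resolved
        yield pathlib.Path("~").expanduser() / filename
    # a project-local file is always a candidate
    yield pathlib.Path("setup.cfg")

print(list(candidate_config_paths()))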
@ -395,7 +390,7 @@ Common commands: (see '--help-commands' for more)
parser = ConfigParser()
for filename in filenames:
if DEBUG:
self.announce(" reading %s" % filename)
self.announce(f" reading {filename}")
parser.read(filename, encoding='utf-8')
for section in parser.sections():
options = parser.options(section)
@ -525,7 +520,7 @@ Common commands: (see '--help-commands' for more)
# Pull the current command from the head of the command line
command = args[0]
if not command_re.match(command):
raise SystemExit("invalid command name '%s'" % command)
raise SystemExit(f"invalid command name '{command}'")
self.commands.append(command)

# Dig up the command class that implements this command, so we
@ -540,7 +535,7 @@ Common commands: (see '--help-commands' for more)
# to be sure that the basic "command" interface is implemented.
if not issubclass(cmd_class, Command):
raise DistutilsClassError(
"command class %s must subclass Command" % cmd_class
f"command class {cmd_class} must subclass Command"
)

# Also make sure that the command object provides a list of its
@ -579,7 +574,7 @@ Common commands: (see '--help-commands' for more)
parser.set_negative_aliases(negative_opt)
(args, opts) = parser.getopt(args[1:])
if hasattr(opts, 'help') and opts.help:
self._show_help(parser, display_options=0, commands=[cmd_class])
self._show_help(parser, display_options=False, commands=[cmd_class])
return

if hasattr(cmd_class, 'help_options') and isinstance(
@ -622,7 +617,7 @@ Common commands: (see '--help-commands' for more)
setattr(self.metadata, attr, value)

def _show_help(
self, parser, global_options=1, display_options=1, commands: Iterable = ()
self, parser, global_options=True, display_options=True, commands: Iterable = ()
):
"""Show help for the setup script command-line in the form of
several lists of command-line options. 'parser' should be a
@ -652,12 +647,11 @@ Common commands: (see '--help-commands' for more)
if display_options:
parser.set_option_table(self.display_options)
parser.print_help(
"Information display options (just display "
+ "information, ignore any commands)"
"Information display options (just display information, ignore any commands)"
)
print()

for command in self.commands:
for command in commands:
if isinstance(command, type) and issubclass(command, Command):
klass = command
else:
@ -668,7 +662,7 @@ Common commands: (see '--help-commands' for more)
)
else:
parser.set_option_table(klass.user_options)
parser.print_help("Options for '%s' command:" % klass.__name__)
parser.print_help(f"Options for '{klass.__name__}' command:")
print()

print(gen_usage(self.script_name))
@ -694,12 +688,12 @@ Common commands: (see '--help-commands' for more)
# display that metadata in the order in which the user supplied the
# metadata options.
any_display_options = 0
is_display_option = {}
is_display_option = set()
for option in self.display_options:
is_display_option[option[0]] = 1
is_display_option.add(option[0])

for opt, val in option_order:
if val and is_display_option.get(opt):
if val and opt in is_display_option:
opt = translate_longopt(opt)
value = getattr(self.metadata, "get_" + opt)()
if opt in ('keywords', 'platforms'):
@ -740,14 +734,9 @@ Common commands: (see '--help-commands' for more)
import distutils.command

std_commands = distutils.command.__all__
is_std = {}
for cmd in std_commands:
is_std[cmd] = 1
is_std = set(std_commands)

extra_commands = []
for cmd in self.cmdclass.keys():
if not is_std.get(cmd):
extra_commands.append(cmd)
extra_commands = [cmd for cmd in self.cmdclass.keys() if cmd not in is_std]

max_length = 0
for cmd in std_commands + extra_commands:
@ -771,14 +760,9 @@ Common commands: (see '--help-commands' for more)
import distutils.command

std_commands = distutils.command.__all__
is_std = {}
for cmd in std_commands:
is_std[cmd] = 1
is_std = set(std_commands)

extra_commands = []
for cmd in self.cmdclass.keys():
if not is_std.get(cmd):
extra_commands.append(cmd)
extra_commands = [cmd for cmd in self.cmdclass.keys() if cmd not in is_std]

rv = []
for cmd in std_commands + extra_commands:
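The same cleanup appears in all three hunks above: a dict whose values were always 1 is replaced by `set(...)`, and the manual accumulation loop by a list comprehension. A small self-contained sketch of that pattern; the sample `std_commands` and `cmdclass` values are stand-ins, not imported from distutils:

# Old style: a dict used purely for membership, built in a loop.
std_commands = ['build', 'install', 'sdist']
cmdclass = {'build': object, 'my_custom_cmd': object}

is_std_old = {}
for cmd in std_commands:
    is_std_old[cmd] = 1
extra_old = []
for cmd in cmdclass.keys():
    if not is_std_old.get(cmd):
        extra_old.append(cmd)

# New style: a real set plus a list comprehension, same result.
is_std = set(std_commands)
extra = [cmd for cmd in cmdclass.keys() if cmd not in is_std]

assert extra == extra_old == ['my_custom_cmd']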
@ -842,9 +826,9 @@ Common commands: (see '--help-commands' for more)
self.cmdclass[command] = klass
return klass

raise DistutilsModuleError("invalid command '%s'" % command)
raise DistutilsModuleError(f"invalid command '{command}'")

def get_command_obj(self, command, create=1):
def get_command_obj(self, command, create=True):
"""Return the command object for 'command'. Normally this object
is cached on a previous call to 'get_command_obj()'; if no command
object for 'command' is in the cache, then we either create and
@ -855,12 +839,12 @@ Common commands: (see '--help-commands' for more)
if DEBUG:
self.announce(
"Distribution.get_command_obj(): "
"creating '%s' command object" % command
f"creating '{command}' command object"
)

klass = self.get_command_class(command)
cmd_obj = self.command_obj[command] = klass(self)
self.have_run[command] = 0
self.have_run[command] = False

# Set any options that were supplied in config files
# or on the command line. (NB. support for error
@ -887,7 +871,7 @@ Common commands: (see '--help-commands' for more)
option_dict = self.get_option_dict(command_name)

if DEBUG:
self.announce(" setting options for '%s' command:" % command_name)
self.announce(f" setting options for '{command_name}' command:")
for option, (source, value) in option_dict.items():
if DEBUG:
self.announce(f" {option} = {value} (from {source})")
@ -915,7 +899,7 @@ Common commands: (see '--help-commands' for more)
except ValueError as msg:
raise DistutilsOptionError(msg)

def reinitialize_command(self, command, reinit_subcommands=0):
def reinitialize_command(self, command, reinit_subcommands=False):
"""Reinitializes a command to the state it was in when first
returned by 'get_command_obj()': ie., initialized but not yet
finalized. This provides the opportunity to sneak option
@ -945,8 +929,8 @@ Common commands: (see '--help-commands' for more)
if not command.finalized:
return command
command.initialize_options()
command.finalized = 0
self.have_run[command_name] = 0
command.finalized = False
self.have_run[command_name] = False
self._set_command_options(command)

if reinit_subcommands:
@ -986,7 +970,7 @@ Common commands: (see '--help-commands' for more)
cmd_obj = self.get_command_obj(command)
cmd_obj.ensure_finalized()
cmd_obj.run()
self.have_run[command] = 1
self.have_run[command] = True

# -- Distribution query methods ------------------------------------
@ -1149,9 +1133,9 @@ class DistributionMetadata:
version = '1.1'

# required fields
file.write('Metadata-Version: %s\n' % version)
file.write('Name: %s\n' % self.get_name())
file.write('Version: %s\n' % self.get_version())
file.write(f'Metadata-Version: {version}\n')
file.write(f'Name: {self.get_name()}\n')
file.write(f'Version: {self.get_version()}\n')

def maybe_write(header, val):
if val:
@ -1189,7 +1173,26 @@ class DistributionMetadata:
return self.version or "0.0.0"

def get_fullname(self):
return f"{self.get_name()}-{self.get_version()}"
return self._fullname(self.get_name(), self.get_version())

@staticmethod
def _fullname(name: str, version: str) -> str:
"""
>>> DistributionMetadata._fullname('setup.tools', '1.0-2')
'setup_tools-1.0.post2'
>>> DistributionMetadata._fullname('setup-tools', '1.2post2')
'setup_tools-1.2.post2'
>>> DistributionMetadata._fullname('setup-tools', '1.0-r2')
'setup_tools-1.0.post2'
>>> DistributionMetadata._fullname('setup.tools', '1.0.post')
'setup_tools-1.0.post0'
>>> DistributionMetadata._fullname('setup.tools', '1.0+ubuntu-1')
'setup_tools-1.0+ubuntu.1'
"""
return "{}-{}".format(
canonicalize_name(name).replace('-', '_'),
canonicalize_version(version, strip_trailing_zero=False),
)

def get_author(self):
return self.author
@ -1281,7 +1284,4 @@ def fix_help_options(options):
"""Convert a 4-tuple 'help_options' list as found in various command
classes to the 3-tuple form required by FancyGetopt.
"""
new_options = []
for help_tuple in options:
new_options.append(help_tuple[0:3])
return new_options
return [opt[0:3] for opt in options]
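The new `_fullname` helper above leans on `packaging.utils` (vendored in this tree as `._vendor.packaging.utils`) to normalize both the project name and the version before joining them into the sdist/wheel stem. A standalone sketch using the public `packaging` package, which should reproduce the doctest values shown in the hunk:

# Assumes the 'packaging' library is installed; distutils vendors its own copy.
from packaging.utils import canonicalize_name, canonicalize_version

def fullname(name: str, version: str) -> str:
    # canonicalize_name('setup.tools') -> 'setup-tools'; the replace() turns the
    # separator into an underscore so the result is a valid distribution stem.
    return "{}-{}".format(
        canonicalize_name(name).replace('-', '_'),
        canonicalize_version(version, strip_trailing_zero=False),
    )

print(fullname('setup.tools', '1.0-2'))         # setup_tools-1.0.post2
print(fullname('setup.tools', '1.0+ubuntu-1'))  # setup_tools-1.0+ubuntu.1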
@ -26,7 +26,7 @@ class Extension:
name : string
the full name of the extension, including any packages -- ie.
*not* a filename or pathname, but Python dotted name
sources : [string]
sources : [string | os.PathLike]
list of source filenames, relative to the distribution root
(where the setup script lives), in Unix form (slash-separated)
for portability. Source files may be C, C++, SWIG (.i),
@ -105,12 +105,17 @@ class Extension:
**kw, # To catch unknown keywords
):
if not isinstance(name, str):
raise AssertionError("'name' must be a string")
if not (isinstance(sources, list) and all(isinstance(v, str) for v in sources)):
raise AssertionError("'sources' must be a list of strings")
raise AssertionError("'name' must be a string") # noqa: TRY004
if not (
isinstance(sources, list)
and all(isinstance(v, (str, os.PathLike)) for v in sources)
):
raise AssertionError(
"'sources' must be a list of strings or PathLike objects."
)

self.name = name
self.sources = sources
self.sources = list(map(os.fspath, sources))
self.include_dirs = include_dirs or []
self.define_macros = define_macros or []
self.undef_macros = undef_macros or []
@ -130,7 +135,7 @@ class Extension:
if len(kw) > 0:
options = [repr(option) for option in kw]
options = ', '.join(sorted(options))
msg = "Unknown Extension options: %s" % options
msg = f"Unknown Extension options: {options}"
warnings.warn(msg)

def __repr__(self):
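The Extension hunks above widen `sources` to accept `os.PathLike` entries (for example `pathlib.Path`) and then normalize them to plain strings with `os.fspath`. A minimal standalone illustration of that validate-then-normalize pattern, not the setuptools class itself:

import os
import pathlib

def normalize_sources(sources):
    """Accept str or os.PathLike entries, return a list of plain path strings."""
    if not (
        isinstance(sources, list)
        and all(isinstance(v, (str, os.PathLike)) for v in sources)
    ):
        raise TypeError("'sources' must be a list of strings or PathLike objects.")
    return list(map(os.fspath, sources))

print(normalize_sources(["foo.c", pathlib.Path("src") / "bar.c"]))
# ['foo.c', 'src/bar.c'] on POSIX ('src\\bar.c' on Windows)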
@ -150,11 +155,11 @@ def read_setup_file(filename): # noqa: C901
# <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
file = TextFile(
filename,
strip_comments=1,
skip_blanks=1,
join_lines=1,
lstrip_ws=1,
rstrip_ws=1,
strip_comments=True,
skip_blanks=True,
join_lines=True,
lstrip_ws=True,
rstrip_ws=True,
)
try:
extensions = []
@ -167,7 +172,7 @@ def read_setup_file(filename): # noqa: C901
continue

if line[0] == line[-1] == "*":
file.warn("'%s' lines not handled yet" % line)
file.warn(f"'{line}' lines not handled yet")
continue

line = expand_makefile_vars(line, vars)
@ -233,7 +238,7 @@ def read_setup_file(filename): # noqa: C901
# and append it to sources. Hmmmm.
ext.extra_objects.append(word)
else:
file.warn("unrecognized argument '%s'" % word)
file.warn(f"unrecognized argument '{word}'")

extensions.append(ext)
finally:
@ -21,7 +21,7 @@ from .errors import DistutilsArgError, DistutilsGetoptError
# utilities, we use '-' in place of '_'. (The spirit of LISP lives on!)
# The similarities to NAME are again not a coincidence...
longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
longopt_re = re.compile(r'^%s$' % longopt_pat)
longopt_re = re.compile(rf'^{longopt_pat}$')

# For recognizing "negative alias" options, eg. "quiet=!verbose"
neg_alias_re = re.compile(f"^({longopt_pat})=!({longopt_pat})$")
|
||||
def add_option(self, long_option, short_option=None, help_string=None):
|
||||
if long_option in self.option_index:
|
||||
raise DistutilsGetoptError(
|
||||
"option conflict: already an option '%s'" % long_option
|
||||
f"option conflict: already an option '{long_option}'"
|
||||
)
|
||||
else:
|
||||
option = (long_option, short_option, help_string)
|
||||
@ -118,11 +118,11 @@ class FancyGetopt:
|
||||
for alias, opt in aliases.items():
|
||||
if alias not in self.option_index:
|
||||
raise DistutilsGetoptError(
|
||||
f"invalid {what} '{alias}': " f"option '{alias}' not defined"
|
||||
f"invalid {what} '{alias}': option '{alias}' not defined"
|
||||
)
|
||||
if opt not in self.option_index:
|
||||
raise DistutilsGetoptError(
|
||||
f"invalid {what} '{alias}': " f"aliased option '{opt}' not defined"
|
||||
f"invalid {what} '{alias}': aliased option '{opt}' not defined"
|
||||
)
|
||||
|
||||
def set_aliases(self, alias):
|
||||
@ -162,13 +162,13 @@ class FancyGetopt:
|
||||
# Type- and value-check the option names
|
||||
if not isinstance(long, str) or len(long) < 2:
|
||||
raise DistutilsGetoptError(
|
||||
("invalid long option '%s': must be a string of length >= 2") % long
|
||||
f"invalid long option '{long}': must be a string of length >= 2"
|
||||
)
|
||||
|
||||
if not ((short is None) or (isinstance(short, str) and len(short) == 1)):
|
||||
raise DistutilsGetoptError(
|
||||
"invalid short option '%s': "
|
||||
"must a single character or None" % short
|
||||
f"invalid short option '{short}': "
|
||||
"must a single character or None"
|
||||
)
|
||||
|
||||
self.repeat[long] = repeat
|
||||
@ -178,7 +178,7 @@ class FancyGetopt:
|
||||
if short:
|
||||
short = short + ':'
|
||||
long = long[0:-1]
|
||||
self.takes_arg[long] = 1
|
||||
self.takes_arg[long] = True
|
||||
else:
|
||||
# Is option is a "negative alias" for some other option (eg.
|
||||
# "quiet" == "!verbose")?
|
||||
@ -191,7 +191,7 @@ class FancyGetopt:
|
||||
)
|
||||
|
||||
self.long_opts[-1] = long # XXX redundant?!
|
||||
self.takes_arg[long] = 0
|
||||
self.takes_arg[long] = False
|
||||
|
||||
# If this is an alias option, make sure its "takes arg" flag is
|
||||
# the same as the option it's aliased to.
|
||||
@ -210,8 +210,8 @@ class FancyGetopt:
|
||||
# '='.
|
||||
if not longopt_re.match(long):
|
||||
raise DistutilsGetoptError(
|
||||
"invalid long option name '%s' "
|
||||
"(must be letters, numbers, hyphens only" % long
|
||||
f"invalid long option name '{long}' "
|
||||
"(must be letters, numbers, hyphens only"
|
||||
)
|
||||
|
||||
self.attr_name[long] = self.get_attr_name(long)
|
||||
@ -268,7 +268,7 @@ class FancyGetopt:
|
||||
|
||||
attr = self.attr_name[opt]
|
||||
# The only repeating option at the moment is 'verbose'.
|
||||
# It has a negative option -q quiet, which should set verbose = 0.
|
||||
# It has a negative option -q quiet, which should set verbose = False.
|
||||
if val and self.repeat.get(attr) is not None:
|
||||
val = getattr(object, attr, 0) + 1
|
||||
setattr(object, attr, val)
|
||||
|
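The last hunk only rewords a comment, but it describes the one repeating option FancyGetopt supports: each `-v`/`--verbose` increments the attribute, while the negative alias `-q`/`--quiet` resets it. A standalone sketch of that accumulation logic; the `Options` holder and `apply_flag` helper are illustrative, not part of distutils:

class Options:
    verbose = False   # new-style default; False still counts as 0

def apply_flag(obj, attr, val, repeatable):
    # Mirrors the loop shown above: repeatable options accumulate, others are set.
    if val and repeatable:
        val = getattr(obj, attr, 0) + 1
    setattr(obj, attr, val)

opts = Options()
apply_flag(opts, "verbose", 1, repeatable=True)   # -v
apply_flag(opts, "verbose", 1, repeatable=True)   # -v again
print(opts.verbose)                               # 2
apply_flag(opts, "verbose", 0, repeatable=True)   # -q resets the count
print(opts.verbose)                               # 0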