Save new folder

2025-11-09 10:02:18 +01:00
commit 5c733eac6b
21738 changed files with 4477854 additions and 0 deletions


@@ -0,0 +1,176 @@
"""A pure Python implementation of import."""
__all__ = ['__import__', 'import_module', 'invalidate_caches', 'reload']
# Bootstrap help #####################################################
# Until bootstrapping is complete, DO NOT import any modules that attempt
# to import importlib._bootstrap (directly or indirectly). Since this
# partially initialised package would be present in sys.modules, those
# modules would get an uninitialised copy of the source version, instead
# of a fully initialised version (either the frozen one or the one
# initialised below if the frozen one is not available).
import _imp # Just the builtin component, NOT the full Python module
import sys
try:
import _frozen_importlib as _bootstrap
except ImportError:
from . import _bootstrap
_bootstrap._setup(sys, _imp)
else:
# importlib._bootstrap is the built-in import, ensure we don't create
# a second copy of the module.
_bootstrap.__name__ = 'importlib._bootstrap'
_bootstrap.__package__ = 'importlib'
try:
_bootstrap.__file__ = __file__.replace('__init__.py', '_bootstrap.py')
except NameError:
# __file__ is not guaranteed to be defined, e.g. if this code gets
# frozen by a tool like cx_Freeze.
pass
sys.modules['importlib._bootstrap'] = _bootstrap
try:
import _frozen_importlib_external as _bootstrap_external
except ImportError:
from . import _bootstrap_external
_bootstrap_external._setup(_bootstrap)
_bootstrap._bootstrap_external = _bootstrap_external
else:
_bootstrap_external.__name__ = 'importlib._bootstrap_external'
_bootstrap_external.__package__ = 'importlib'
try:
_bootstrap_external.__file__ = __file__.replace('__init__.py', '_bootstrap_external.py')
except NameError:
# __file__ is not guaranteed to be defined, e.g. if this code gets
# frozen by a tool like cx_Freeze.
pass
sys.modules['importlib._bootstrap_external'] = _bootstrap_external
# To simplify imports in test code
_pack_uint32 = _bootstrap_external._pack_uint32
_unpack_uint32 = _bootstrap_external._unpack_uint32
# Fully bootstrapped at this point, import whatever you like, circular
# dependencies and startup overhead minimisation permitting :)
import types
import warnings
# Public API #########################################################
from ._bootstrap import __import__
def invalidate_caches():
"""Call the invalidate_caches() method on all meta path finders stored in
sys.meta_path (where implemented)."""
for finder in sys.meta_path:
if hasattr(finder, 'invalidate_caches'):
finder.invalidate_caches()
def find_loader(name, path=None):
"""Return the loader for the specified module.
This is a backward-compatible wrapper around find_spec().
This function is deprecated in favor of importlib.util.find_spec().
"""
warnings.warn('Deprecated since Python 3.4. '
'Use importlib.util.find_spec() instead.',
DeprecationWarning, stacklevel=2)
try:
loader = sys.modules[name].__loader__
if loader is None:
raise ValueError('{}.__loader__ is None'.format(name))
else:
return loader
except KeyError:
pass
except AttributeError:
raise ValueError('{}.__loader__ is not set'.format(name)) from None
spec = _bootstrap._find_spec(name, path)
# We won't worry about malformed specs (missing attributes).
if spec is None:
return None
if spec.loader is None:
if spec.submodule_search_locations is None:
raise ImportError('spec for {} missing loader'.format(name),
name=name)
raise ImportError('namespace packages do not have loaders',
name=name)
return spec.loader
def import_module(name, package=None):
"""Import a module.
The 'package' argument is required when performing a relative import. It
specifies the package to use as the anchor point from which to resolve the
relative import to an absolute import.
"""
level = 0
if name.startswith('.'):
if not package:
msg = ("the 'package' argument is required to perform a relative "
"import for {!r}")
raise TypeError(msg.format(name))
for character in name:
if character != '.':
break
level += 1
return _bootstrap._gcd_import(name[level:], package, level)
_RELOADING = {}
def reload(module):
"""Reload the module and return it.
The module must have been successfully imported before.
"""
if not module or not isinstance(module, types.ModuleType):
raise TypeError("reload() argument must be a module")
try:
name = module.__spec__.name
except AttributeError:
name = module.__name__
if sys.modules.get(name) is not module:
msg = "module {} not in sys.modules"
raise ImportError(msg.format(name), name=name)
if name in _RELOADING:
return _RELOADING[name]
_RELOADING[name] = module
try:
parent_name = name.rpartition('.')[0]
if parent_name:
try:
parent = sys.modules[parent_name]
except KeyError:
msg = "parent {!r} not in sys.modules"
raise ImportError(msg.format(parent_name),
name=parent_name) from None
else:
pkgpath = parent.__path__
else:
pkgpath = None
target = module
spec = module.__spec__ = _bootstrap._find_spec(name, pkgpath, target)
if spec is None:
raise ModuleNotFoundError(f"spec not found for the module {name!r}", name=name)
_bootstrap._exec(spec, module)
# The module may have replaced itself in sys.modules!
return sys.modules[name]
finally:
try:
del _RELOADING[name]
except KeyError:
pass
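# Editor's note: a brief usage sketch of the public API defined in this file
# (import_module, invalidate_caches, reload); the module names are only illustrative.
import importlib

mod = importlib.import_module('json')                       # absolute import by dotted name
sub = importlib.import_module('.decoder', package='json')   # relative import anchored at 'json'
importlib.invalidate_caches()                               # ask every meta path finder to drop cached state
mod = importlib.reload(mod)                                 # re-execute the module; returns the object now in sys.modules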

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,62 @@
import os
import pathlib
import zipfile
import tempfile
import functools
import contextlib
def from_package(package):
"""
Return a Traversable object for the given package.
"""
return fallback_resources(package.__spec__)
def fallback_resources(spec):
package_directory = pathlib.Path(spec.origin).parent
try:
archive_path = spec.loader.archive
rel_path = package_directory.relative_to(archive_path)
return zipfile.Path(archive_path, str(rel_path) + '/')
except Exception:
pass
return package_directory
@contextlib.contextmanager
def _tempfile(reader, suffix=''):
# Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
# blocks due to the need to close the temporary file to work on Windows
# properly.
fd, raw_path = tempfile.mkstemp(suffix=suffix)
try:
os.write(fd, reader())
os.close(fd)
yield pathlib.Path(raw_path)
finally:
try:
os.remove(raw_path)
except FileNotFoundError:
pass
@functools.singledispatch
@contextlib.contextmanager
def as_file(path):
"""
Given a Traversable object, return that object as a
path on the local file system in a context manager.
"""
with _tempfile(path.read_bytes, suffix=path.name) as local:
yield local
@as_file.register(pathlib.Path)
@contextlib.contextmanager
def _(path):
"""
Degenerate behavior for pathlib.Path objects.
"""
yield path
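# Editor's note: a sketch of how as_file() above is meant to be used together with
# importlib.resources.files(); 'mypkg' and 'data.txt' are hypothetical names.
from importlib import resources

with resources.as_file(resources.files('mypkg') / 'data.txt') as path:
    # 'path' is a real filesystem pathlib.Path, extracted to a temporary
    # file if the package lives inside a zip archive.
    payload = path.read_bytes()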


@@ -0,0 +1,474 @@
"""Abstract base classes related to import."""
from . import _bootstrap
from . import _bootstrap_external
from . import machinery
try:
import _frozen_importlib
except ImportError as exc:
if exc.name != '_frozen_importlib':
raise
_frozen_importlib = None
try:
import _frozen_importlib_external
except ImportError:
_frozen_importlib_external = _bootstrap_external
import abc
import warnings
from typing import Protocol, runtime_checkable
def _register(abstract_cls, *classes):
for cls in classes:
abstract_cls.register(cls)
if _frozen_importlib is not None:
try:
frozen_cls = getattr(_frozen_importlib, cls.__name__)
except AttributeError:
frozen_cls = getattr(_frozen_importlib_external, cls.__name__)
abstract_cls.register(frozen_cls)
class Finder(metaclass=abc.ABCMeta):
"""Legacy abstract base class for import finders.
It may be subclassed for compatibility with legacy third party
reimplementations of the import system. Otherwise, finder
implementations should derive from the more specific MetaPathFinder
or PathEntryFinder ABCs.
Deprecated since Python 3.3
"""
@abc.abstractmethod
def find_module(self, fullname, path=None):
"""An abstract method that should find a module.
The fullname is a str and the optional path is a str or None.
Returns a Loader object or None.
"""
class MetaPathFinder(Finder):
"""Abstract base class for import finders on sys.meta_path."""
# We don't define find_spec() here since that would break
# hasattr checks we do to support backward compatibility.
def find_module(self, fullname, path):
"""Return a loader for the module.
If no module is found, return None. The fullname is a str and
the path is a list of strings or None.
This method is deprecated since Python 3.4 in favor of
finder.find_spec(). If find_spec() exists then backwards-compatible
functionality is provided for this method.
"""
warnings.warn("MetaPathFinder.find_module() is deprecated since Python "
"3.4 in favor of MetaPathFinder.find_spec() "
"(available since 3.4)",
DeprecationWarning,
stacklevel=2)
if not hasattr(self, 'find_spec'):
return None
found = self.find_spec(fullname, path)
return found.loader if found is not None else None
def invalidate_caches(self):
"""An optional method for clearing the finder's cache, if any.
This method is used by importlib.invalidate_caches().
"""
_register(MetaPathFinder, machinery.BuiltinImporter, machinery.FrozenImporter,
machinery.PathFinder, machinery.WindowsRegistryFinder)
class PathEntryFinder(Finder):
"""Abstract base class for path entry finders used by PathFinder."""
# We don't define find_spec() here since that would break
# hasattr checks we do to support backward compatibility.
def find_loader(self, fullname):
"""Return (loader, namespace portion) for the path entry.
The fullname is a str. The namespace portion is a sequence of
path entries contributing to part of a namespace package. The
sequence may be empty. If loader is not None, the portion will
be ignored.
The portion will be discarded if another path entry finder
locates the module as a normal module or package.
This method is deprecated since Python 3.4 in favor of
finder.find_spec(). If find_spec() is provided, then backwards-compatible
functionality is provided.
"""
warnings.warn("PathEntryFinder.find_loader() is deprecated since Python "
"3.4 in favor of PathEntryFinder.find_spec() "
"(available since 3.4)",
DeprecationWarning,
stacklevel=2)
if not hasattr(self, 'find_spec'):
return None, []
found = self.find_spec(fullname)
if found is not None:
if not found.submodule_search_locations:
portions = []
else:
portions = found.submodule_search_locations
return found.loader, portions
else:
return None, []
find_module = _bootstrap_external._find_module_shim
def invalidate_caches(self):
"""An optional method for clearing the finder's cache, if any.
This method is used by PathFinder.invalidate_caches().
"""
_register(PathEntryFinder, machinery.FileFinder)
class Loader(metaclass=abc.ABCMeta):
"""Abstract base class for import loaders."""
def create_module(self, spec):
"""Return a module to initialize and into which to load.
This method should raise ImportError if anything prevents it
from creating a new module. It may return None to indicate
that the spec should create the new module.
"""
# By default, defer to default semantics for the new module.
return None
# We don't define exec_module() here since that would break
# hasattr checks we do to support backward compatibility.
def load_module(self, fullname):
"""Return the loaded module.
The module must be added to sys.modules and have import-related
attributes set properly. The fullname is a str.
ImportError is raised on failure.
This method is deprecated in favor of loader.exec_module(). If
exec_module() exists then it is used to provide backwards-compatible
functionality for this method.
"""
if not hasattr(self, 'exec_module'):
raise ImportError
return _bootstrap._load_module_shim(self, fullname)
def module_repr(self, module):
"""Return a module's repr.
Used by the module type when the method does not raise
NotImplementedError.
This method is deprecated.
"""
# The exception will cause ModuleType.__repr__ to ignore this method.
raise NotImplementedError
class ResourceLoader(Loader):
"""Abstract base class for loaders which can return data from their
back-end storage.
This ABC represents one of the optional protocols specified by PEP 302.
"""
@abc.abstractmethod
def get_data(self, path):
"""Abstract method which when implemented should return the bytes for
the specified path. The path must be a str."""
raise OSError
class InspectLoader(Loader):
"""Abstract base class for loaders which support inspection about the
modules they can load.
This ABC represents one of the optional protocols specified by PEP 302.
"""
def is_package(self, fullname):
"""Optional method which when implemented should return whether the
module is a package. The fullname is a str. Returns a bool.
Raises ImportError if the module cannot be found.
"""
raise ImportError
def get_code(self, fullname):
"""Method which returns the code object for the module.
The fullname is a str. Returns a types.CodeType if possible, else
returns None if a code object does not make sense
(e.g. built-in module). Raises ImportError if the module cannot be
found.
"""
source = self.get_source(fullname)
if source is None:
return None
return self.source_to_code(source)
@abc.abstractmethod
def get_source(self, fullname):
"""Abstract method which should return the source code for the
module. The fullname is a str. Returns a str.
Raises ImportError if the module cannot be found.
"""
raise ImportError
@staticmethod
def source_to_code(data, path='<string>'):
"""Compile 'data' into a code object.
The 'data' argument can be anything that compile() can handle. The 'path'
argument should be where the data was retrieved (when applicable)."""
return compile(data, path, 'exec', dont_inherit=True)
exec_module = _bootstrap_external._LoaderBasics.exec_module
load_module = _bootstrap_external._LoaderBasics.load_module
_register(InspectLoader, machinery.BuiltinImporter, machinery.FrozenImporter)
class ExecutionLoader(InspectLoader):
"""Abstract base class for loaders that wish to support the execution of
modules as scripts.
This ABC represents one of the optional protocols specified in PEP 302.
"""
@abc.abstractmethod
def get_filename(self, fullname):
"""Abstract method which should return the value that __file__ is to be
set to.
Raises ImportError if the module cannot be found.
"""
raise ImportError
def get_code(self, fullname):
"""Method to return the code object for fullname.
Should return None if not applicable (e.g. built-in module).
Raise ImportError if the module cannot be found.
"""
source = self.get_source(fullname)
if source is None:
return None
try:
path = self.get_filename(fullname)
except ImportError:
return self.source_to_code(source)
else:
return self.source_to_code(source, path)
_register(ExecutionLoader, machinery.ExtensionFileLoader)
class FileLoader(_bootstrap_external.FileLoader, ResourceLoader, ExecutionLoader):
"""Abstract base class partially implementing the ResourceLoader and
ExecutionLoader ABCs."""
_register(FileLoader, machinery.SourceFileLoader,
machinery.SourcelessFileLoader)
class SourceLoader(_bootstrap_external.SourceLoader, ResourceLoader, ExecutionLoader):
"""Abstract base class for loading source code (and optionally any
corresponding bytecode).
To support loading from source code, the abstractmethods inherited from
ResourceLoader and ExecutionLoader need to be implemented. To also support
loading from bytecode, the optional methods specified directly by this ABC
are required.
Inherited abstractmethods not implemented in this ABC:
* ResourceLoader.get_data
* ExecutionLoader.get_filename
"""
def path_mtime(self, path):
"""Return the (int) modification time for the path (str)."""
if self.path_stats.__func__ is SourceLoader.path_stats:
raise OSError
return int(self.path_stats(path)['mtime'])
def path_stats(self, path):
"""Return a metadata dict for the source pointed to by the path (str).
Possible keys:
- 'mtime' (mandatory) is the numeric timestamp of last source
code modification;
- 'size' (optional) is the size in bytes of the source code.
"""
if self.path_mtime.__func__ is SourceLoader.path_mtime:
raise OSError
return {'mtime': self.path_mtime(path)}
def set_data(self, path, data):
"""Write the bytes to the path (if possible).
Accepts a str path and data as bytes.
Any needed intermediary directories are to be created. If for some
reason the file cannot be written because of permissions, fail
silently.
"""
_register(SourceLoader, machinery.SourceFileLoader)
class ResourceReader(metaclass=abc.ABCMeta):
"""Abstract base class to provide resource-reading support.
Loaders that support resource reading are expected to implement
the ``get_resource_reader(fullname)`` method and have it either return None
or an object compatible with this ABC.
"""
@abc.abstractmethod
def open_resource(self, resource):
"""Return an opened, file-like object for binary reading.
The 'resource' argument is expected to represent only a file name
and thus not contain any subdirectory components.
If the resource cannot be found, FileNotFoundError is raised.
"""
raise FileNotFoundError
@abc.abstractmethod
def resource_path(self, resource):
"""Return the file system path to the specified resource.
The 'resource' argument is expected to represent only a file name
and thus not contain any subdirectory components.
If the resource does not exist on the file system, raise
FileNotFoundError.
"""
raise FileNotFoundError
@abc.abstractmethod
def is_resource(self, name):
"""Return True if the named 'name' is consider a resource."""
raise FileNotFoundError
@abc.abstractmethod
def contents(self):
"""Return an iterable of strings over the contents of the package."""
return []
_register(ResourceReader, machinery.SourceFileLoader)
@runtime_checkable
class Traversable(Protocol):
"""
An object with a subset of pathlib.Path methods suitable for
traversing directories and opening files.
"""
@abc.abstractmethod
def iterdir(self):
"""
Yield Traversable objects in self
"""
@abc.abstractmethod
def read_bytes(self):
"""
Read contents of self as bytes
"""
@abc.abstractmethod
def read_text(self, encoding=None):
"""
Read contents of self as text
"""
@abc.abstractmethod
def is_dir(self):
"""
Return True if self is a dir
"""
@abc.abstractmethod
def is_file(self):
"""
Return True if self is a file
"""
@abc.abstractmethod
def joinpath(self, child):
"""
Return Traversable child in self
"""
@abc.abstractmethod
def __truediv__(self, child):
"""
Return Traversable child in self
"""
@abc.abstractmethod
def open(self, mode='r', *args, **kwargs):
"""
mode may be 'r' or 'rb' to open as text or binary. Return a handle
suitable for reading (same as pathlib.Path.open).
When opening as text, accepts encoding parameters such as those
accepted by io.TextIOWrapper.
"""
@abc.abstractproperty
def name(self):
# type: () -> str
"""
The base name of this object without any parent references.
"""
class TraversableResources(ResourceReader):
@abc.abstractmethod
def files(self):
"""Return a Traversable object for the loaded package."""
def open_resource(self, resource):
return self.files().joinpath(resource).open('rb')
def resource_path(self, resource):
raise FileNotFoundError(resource)
def is_resource(self, path):
return self.files().joinpath(path).is_file()
def contents(self):
return (item.name for item in self.files().iterdir())
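# Editor's note: a minimal sketch of subclassing one of the ABCs above -- a hypothetical
# meta path finder that blocks a single module name by raising from find_spec().
import sys
from importlib import abc

class BlockFinder(abc.MetaPathFinder):
    """Hypothetical finder that refuses to import one module."""
    def __init__(self, blocked):
        self.blocked = blocked
    def find_spec(self, fullname, path, target=None):
        if fullname == self.blocked:
            raise ModuleNotFoundError(f'{fullname!r} is blocked')
        return None   # defer to the remaining finders on sys.meta_path

sys.meta_path.insert(0, BlockFinder('telnetlib'))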


@@ -0,0 +1,21 @@
"""The machinery of importlib: finders, loaders, hooks, etc."""
import _imp
from ._bootstrap import ModuleSpec
from ._bootstrap import BuiltinImporter
from ._bootstrap import FrozenImporter
from ._bootstrap_external import (SOURCE_SUFFIXES, DEBUG_BYTECODE_SUFFIXES,
OPTIMIZED_BYTECODE_SUFFIXES, BYTECODE_SUFFIXES,
EXTENSION_SUFFIXES)
from ._bootstrap_external import WindowsRegistryFinder
from ._bootstrap_external import PathFinder
from ._bootstrap_external import FileFinder
from ._bootstrap_external import SourceFileLoader
from ._bootstrap_external import SourcelessFileLoader
from ._bootstrap_external import ExtensionFileLoader
def all_suffixes():
"""Returns a list of all recognized module suffixes for this process"""
return SOURCE_SUFFIXES + BYTECODE_SUFFIXES + EXTENSION_SUFFIXES
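# Editor's note: a small sketch of the machinery re-exported here; the directory
# and module name below are arbitrary examples.
from importlib import machinery

print(machinery.all_suffixes())   # e.g. ['.py', '.pyc', '.so']; the exact list is platform-dependent

# FileFinder takes (loader, suffixes) pairs, mirroring how sys.path_hooks is built.
finder = machinery.FileFinder('/tmp', (machinery.SourceFileLoader, machinery.SOURCE_SUFFIXES))
spec = finder.find_spec('some_module')   # None unless /tmp/some_module.py exists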


@@ -0,0 +1,586 @@
import io
import os
import re
import abc
import csv
import sys
import email
import pathlib
import zipfile
import operator
import functools
import itertools
import posixpath
import collections
from configparser import ConfigParser
from contextlib import suppress
from importlib import import_module
from importlib.abc import MetaPathFinder
from itertools import starmap
__all__ = [
'Distribution',
'DistributionFinder',
'PackageNotFoundError',
'distribution',
'distributions',
'entry_points',
'files',
'metadata',
'requires',
'version',
]
class PackageNotFoundError(ModuleNotFoundError):
"""The package was not found."""
class EntryPoint(
collections.namedtuple('EntryPointBase', 'name value group')):
"""An entry point as defined by Python packaging conventions.
See `the packaging docs on entry points
<https://packaging.python.org/specifications/entry-points/>`_
for more information.
"""
pattern = re.compile(
r'(?P<module>[\w.]+)\s*'
r'(:\s*(?P<attr>[\w.]+))?\s*'
r'(?P<extras>\[.*\])?\s*$'
)
"""
A regular expression describing the syntax for an entry point,
which might look like:
- module
- package.module
- package.module:attribute
- package.module:object.attribute
- package.module:attr [extra1, extra2]
Other combinations are possible as well.
The expression is lenient about whitespace around the ':',
following the attr, and following any extras.
"""
def load(self):
"""Load the entry point from its definition. If only a module
is indicated by the value, return that module. Otherwise,
return the named object.
"""
match = self.pattern.match(self.value)
module = import_module(match.group('module'))
attrs = filter(None, (match.group('attr') or '').split('.'))
return functools.reduce(getattr, attrs, module)
@property
def module(self):
match = self.pattern.match(self.value)
return match.group('module')
@property
def attr(self):
match = self.pattern.match(self.value)
return match.group('attr')
@property
def extras(self):
match = self.pattern.match(self.value)
return list(re.finditer(r'\w+', match.group('extras') or ''))
@classmethod
def _from_config(cls, config):
return [
cls(name, value, group)
for group in config.sections()
for name, value in config.items(group)
]
@classmethod
def _from_text(cls, text):
config = ConfigParser(delimiters='=')
# case sensitive: https://stackoverflow.com/q/1611799/812183
config.optionxform = str
try:
config.read_string(text)
except AttributeError: # pragma: nocover
# Python 2 has no read_string
config.readfp(io.StringIO(text))
return EntryPoint._from_config(config)
def __iter__(self):
"""
Supply iter so one may construct dicts of EntryPoints easily.
"""
return iter((self.name, self))
def __reduce__(self):
return (
self.__class__,
(self.name, self.value, self.group),
)
class PackagePath(pathlib.PurePosixPath):
"""A reference to a path in a package"""
def read_text(self, encoding='utf-8'):
with self.locate().open(encoding=encoding) as stream:
return stream.read()
def read_binary(self):
with self.locate().open('rb') as stream:
return stream.read()
def locate(self):
"""Return a path-like object for this path"""
return self.dist.locate_file(self)
class FileHash:
def __init__(self, spec):
self.mode, _, self.value = spec.partition('=')
def __repr__(self):
return '<FileHash mode: {} value: {}>'.format(self.mode, self.value)
class Distribution:
"""A Python distribution package."""
@abc.abstractmethod
def read_text(self, filename):
"""Attempt to load metadata file given by the name.
:param filename: The name of the file in the distribution info.
:return: The text if found, otherwise None.
"""
@abc.abstractmethod
def locate_file(self, path):
"""
Given a path to a file in this distribution, return a path
to it.
"""
@classmethod
def from_name(cls, name):
"""Return the Distribution for the given package name.
:param name: The name of the distribution package to search for.
:return: The Distribution instance (or subclass thereof) for the named
package, if found.
:raises PackageNotFoundError: When the named package's distribution
metadata cannot be found.
"""
for resolver in cls._discover_resolvers():
dists = resolver(DistributionFinder.Context(name=name))
dist = next(iter(dists), None)
if dist is not None:
return dist
else:
raise PackageNotFoundError(name)
@classmethod
def discover(cls, **kwargs):
"""Return an iterable of Distribution objects for all packages.
Pass a ``context`` or pass keyword arguments for constructing
a context.
:context: A ``DistributionFinder.Context`` object.
:return: Iterable of Distribution objects for all packages.
"""
context = kwargs.pop('context', None)
if context and kwargs:
raise ValueError("cannot accept context and kwargs")
context = context or DistributionFinder.Context(**kwargs)
return itertools.chain.from_iterable(
resolver(context)
for resolver in cls._discover_resolvers()
)
@staticmethod
def at(path):
"""Return a Distribution for the indicated metadata path
:param path: a string or path-like object
:return: a concrete Distribution instance for the path
"""
return PathDistribution(pathlib.Path(path))
@staticmethod
def _discover_resolvers():
"""Search the meta_path for resolvers."""
declared = (
getattr(finder, 'find_distributions', None)
for finder in sys.meta_path
)
return filter(None, declared)
@classmethod
def _local(cls, root='.'):
from pep517 import build, meta
system = build.compat_system(root)
builder = functools.partial(
meta.build,
source_dir=root,
system=system,
)
return PathDistribution(zipfile.Path(meta.build_as_zip(builder)))
@property
def metadata(self):
"""Return the parsed metadata for this Distribution.
The returned object will have keys that name the various bits of
metadata. See PEP 566 for details.
"""
text = (
self.read_text('METADATA')
or self.read_text('PKG-INFO')
# This last clause is here to support old egg-info files. Its
# effect is to just end up using the PathDistribution's self._path
# (which points to the egg-info file) attribute unchanged.
or self.read_text('')
)
return email.message_from_string(text)
@property
def version(self):
"""Return the 'Version' metadata for the distribution package."""
return self.metadata['Version']
@property
def entry_points(self):
return EntryPoint._from_text(self.read_text('entry_points.txt'))
@property
def files(self):
"""Files in this distribution.
:return: List of PackagePath for this distribution or None
Result is `None` if the metadata file that enumerates files
(i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
missing.
Result may be empty if the metadata exists but is empty.
"""
file_lines = self._read_files_distinfo() or self._read_files_egginfo()
def make_file(name, hash=None, size_str=None):
result = PackagePath(name)
result.hash = FileHash(hash) if hash else None
result.size = int(size_str) if size_str else None
result.dist = self
return result
return file_lines and list(starmap(make_file, csv.reader(file_lines)))
def _read_files_distinfo(self):
"""
Read the lines of RECORD
"""
text = self.read_text('RECORD')
return text and text.splitlines()
def _read_files_egginfo(self):
"""
SOURCES.txt might contain literal commas, so wrap each line
in quotes.
"""
text = self.read_text('SOURCES.txt')
return text and map('"{}"'.format, text.splitlines())
@property
def requires(self):
"""Generated requirements specified for this Distribution"""
reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
return reqs and list(reqs)
def _read_dist_info_reqs(self):
return self.metadata.get_all('Requires-Dist')
def _read_egg_info_reqs(self):
source = self.read_text('requires.txt')
return source and self._deps_from_requires_text(source)
@classmethod
def _deps_from_requires_text(cls, source):
section_pairs = cls._read_sections(source.splitlines())
sections = {
section: list(map(operator.itemgetter('line'), results))
for section, results in
itertools.groupby(section_pairs, operator.itemgetter('section'))
}
return cls._convert_egg_info_reqs_to_simple_reqs(sections)
@staticmethod
def _read_sections(lines):
section = None
for line in filter(None, lines):
section_match = re.match(r'\[(.*)\]$', line)
if section_match:
section = section_match.group(1)
continue
yield locals()
@staticmethod
def _convert_egg_info_reqs_to_simple_reqs(sections):
"""
Historically, setuptools would solicit and store 'extra'
requirements, including those with environment markers,
in separate sections. More modern tools expect each
dependency to be defined separately, with any relevant
extras and environment markers attached directly to that
requirement. This method converts the former to the
latter. See _test_deps_from_requires_text for an example.
"""
def make_condition(name):
return name and 'extra == "{name}"'.format(name=name)
def parse_condition(section):
section = section or ''
extra, sep, markers = section.partition(':')
if extra and markers:
markers = '({markers})'.format(markers=markers)
conditions = list(filter(None, [markers, make_condition(extra)]))
return '; ' + ' and '.join(conditions) if conditions else ''
for section, deps in sections.items():
for dep in deps:
yield dep + parse_condition(section)
class DistributionFinder(MetaPathFinder):
"""
A MetaPathFinder capable of discovering installed distributions.
"""
class Context:
"""
Keyword arguments presented by the caller to
``distributions()`` or ``Distribution.discover()``
to narrow the scope of a search for distributions
in all DistributionFinders.
Each DistributionFinder may expect any parameters
and should attempt to honor the canonical
parameters defined below when appropriate.
"""
name = None
"""
Specific name for which a distribution finder should match.
A name of ``None`` matches all distributions.
"""
def __init__(self, **kwargs):
vars(self).update(kwargs)
@property
def path(self):
"""
The path that a distribution finder should search.
Typically refers to Python package paths and defaults
to ``sys.path``.
"""
return vars(self).get('path', sys.path)
@abc.abstractmethod
def find_distributions(self, context=Context()):
"""
Find distributions.
Return an iterable of all Distribution instances capable of
loading the metadata for packages matching the ``context``,
a DistributionFinder.Context instance.
"""
class FastPath:
"""
Micro-optimized class for searching a path for
children.
"""
def __init__(self, root):
self.root = root
self.base = os.path.basename(self.root).lower()
def joinpath(self, child):
return pathlib.Path(self.root, child)
def children(self):
with suppress(Exception):
return os.listdir(self.root or '')
with suppress(Exception):
return self.zip_children()
return []
def zip_children(self):
zip_path = zipfile.Path(self.root)
names = zip_path.root.namelist()
self.joinpath = zip_path.joinpath
return dict.fromkeys(
child.split(posixpath.sep, 1)[0]
for child in names
)
def is_egg(self, search):
base = self.base
return (
base == search.versionless_egg_name
or base.startswith(search.prefix)
and base.endswith('.egg'))
def search(self, name):
for child in self.children():
n_low = child.lower()
if (n_low in name.exact_matches
or n_low.startswith(name.prefix)
and n_low.endswith(name.suffixes)
# legacy case:
or self.is_egg(name) and n_low == 'egg-info'):
yield self.joinpath(child)
class Prepared:
"""
A prepared search for metadata on a possibly-named package.
"""
normalized = ''
prefix = ''
suffixes = '.dist-info', '.egg-info'
exact_matches = [''][:0]
versionless_egg_name = ''
def __init__(self, name):
self.name = name
if name is None:
return
self.normalized = name.lower().replace('-', '_')
self.prefix = self.normalized + '-'
self.exact_matches = [
self.normalized + suffix for suffix in self.suffixes]
self.versionless_egg_name = self.normalized + '.egg'
class MetadataPathFinder(DistributionFinder):
@classmethod
def find_distributions(cls, context=DistributionFinder.Context()):
"""
Find distributions.
Return an iterable of all Distribution instances capable of
loading the metadata for packages matching ``context.name``
(or all names if ``None`` indicated) along the paths in the list
of directories ``context.path``.
"""
found = cls._search_paths(context.name, context.path)
return map(PathDistribution, found)
@classmethod
def _search_paths(cls, name, paths):
"""Find metadata directories in paths heuristically."""
return itertools.chain.from_iterable(
path.search(Prepared(name))
for path in map(FastPath, paths)
)
class PathDistribution(Distribution):
def __init__(self, path):
"""Construct a distribution from a path to the metadata directory.
:param path: A pathlib.Path or similar object supporting
.joinpath(), __truediv__, .parent, and .read_text().
"""
self._path = path
def read_text(self, filename):
with suppress(FileNotFoundError, IsADirectoryError, KeyError,
NotADirectoryError, PermissionError):
return self._path.joinpath(filename).read_text(encoding='utf-8')
read_text.__doc__ = Distribution.read_text.__doc__
def locate_file(self, path):
return self._path.parent / path
def distribution(distribution_name):
"""Get the ``Distribution`` instance for the named package.
:param distribution_name: The name of the distribution package as a string.
:return: A ``Distribution`` instance (or subclass thereof).
"""
return Distribution.from_name(distribution_name)
def distributions(**kwargs):
"""Get all ``Distribution`` instances in the current environment.
:return: An iterable of ``Distribution`` instances.
"""
return Distribution.discover(**kwargs)
def metadata(distribution_name):
"""Get the metadata for the named package.
:param distribution_name: The name of the distribution package to query.
:return: An email.Message containing the parsed metadata.
"""
return Distribution.from_name(distribution_name).metadata
def version(distribution_name):
"""Get the version string for the named package.
:param distribution_name: The name of the distribution package to query.
:return: The version string for the package as defined in the package's
"Version" metadata key.
"""
return distribution(distribution_name).version
def entry_points():
"""Return EntryPoint objects for all installed packages.
:return: EntryPoint objects for all installed packages.
"""
eps = itertools.chain.from_iterable(
dist.entry_points for dist in distributions())
by_group = operator.attrgetter('group')
ordered = sorted(eps, key=by_group)
grouped = itertools.groupby(ordered, by_group)
return {
group: tuple(eps)
for group, eps in grouped
}
def files(distribution_name):
"""Return a list of files for the named package.
:param distribution_name: The name of the distribution package to query.
:return: List of files composing the distribution.
"""
return distribution(distribution_name).files
def requires(distribution_name):
"""
Return a list of requirements for the named package.
:return: An iterator of requirements, suitable for
packaging.requirement.Requirement.
"""
return distribution(distribution_name).requires
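# Editor's note: a usage sketch of the module-level helpers above; 'pip' is only an
# example of an installed distribution, and PackageNotFoundError is raised if it is absent.
from importlib import metadata

print(metadata.version('pip'))                         # the 'Version' metadata field
print(metadata.metadata('pip')['Summary'])             # parsed email.Message access
scripts = metadata.entry_points().get('console_scripts', ())
print([ep.name for ep in scripts][:5])                 # entry points grouped by group name
print(metadata.requires('pip'))                        # None if no requirements are declared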


@@ -0,0 +1,215 @@
import os
from . import abc as resources_abc
from . import _common
from ._common import as_file
from contextlib import contextmanager, suppress
from importlib import import_module
from importlib.abc import ResourceLoader
from io import BytesIO, TextIOWrapper
from pathlib import Path
from types import ModuleType
from typing import ContextManager, Iterable, Optional, Union
from typing import cast
from typing.io import BinaryIO, TextIO
__all__ = [
'Package',
'Resource',
'as_file',
'contents',
'files',
'is_resource',
'open_binary',
'open_text',
'path',
'read_binary',
'read_text',
]
Package = Union[str, ModuleType]
Resource = Union[str, os.PathLike]
def _resolve(name) -> ModuleType:
"""If name is a string, resolve to a module."""
if hasattr(name, '__spec__'):
return name
return import_module(name)
def _get_package(package) -> ModuleType:
"""Take a package name or module object and return the module.
If a name, the module is imported. If the resolved module
object is not a package, raise an exception.
"""
module = _resolve(package)
if module.__spec__.submodule_search_locations is None:
raise TypeError('{!r} is not a package'.format(package))
return module
def _normalize_path(path) -> str:
"""Normalize a path by ensuring it is a string.
If the resulting string contains path separators, an exception is raised.
"""
parent, file_name = os.path.split(path)
if parent:
raise ValueError('{!r} must be only a file name'.format(path))
return file_name
def _get_resource_reader(
package: ModuleType) -> Optional[resources_abc.ResourceReader]:
# Return the package's loader if it's a ResourceReader. We can't use
an issubclass() check here because apparently abc's __subclasscheck__()
# hook wants to create a weak reference to the object, but
# zipimport.zipimporter does not support weak references, resulting in a
# TypeError. That seems terrible.
spec = package.__spec__
if hasattr(spec.loader, 'get_resource_reader'):
return cast(resources_abc.ResourceReader,
spec.loader.get_resource_reader(spec.name))
return None
def _check_location(package):
if package.__spec__.origin is None or not package.__spec__.has_location:
raise FileNotFoundError(f'Package has no location {package!r}')
def open_binary(package: Package, resource: Resource) -> BinaryIO:
"""Return a file-like object opened for binary reading of the resource."""
resource = _normalize_path(resource)
package = _get_package(package)
reader = _get_resource_reader(package)
if reader is not None:
return reader.open_resource(resource)
absolute_package_path = os.path.abspath(
package.__spec__.origin or 'non-existent file')
package_path = os.path.dirname(absolute_package_path)
full_path = os.path.join(package_path, resource)
try:
return open(full_path, mode='rb')
except OSError:
# Just assume the loader is a resource loader; all the relevant
# importlib.machinery loaders are and an AttributeError for
# get_data() will make it clear what is needed from the loader.
loader = cast(ResourceLoader, package.__spec__.loader)
data = None
if hasattr(package.__spec__.loader, 'get_data'):
with suppress(OSError):
data = loader.get_data(full_path)
if data is None:
package_name = package.__spec__.name
message = '{!r} resource not found in {!r}'.format(
resource, package_name)
raise FileNotFoundError(message)
return BytesIO(data)
def open_text(package: Package,
resource: Resource,
encoding: str = 'utf-8',
errors: str = 'strict') -> TextIO:
"""Return a file-like object opened for text reading of the resource."""
return TextIOWrapper(
open_binary(package, resource), encoding=encoding, errors=errors)
def read_binary(package: Package, resource: Resource) -> bytes:
"""Return the binary contents of the resource."""
with open_binary(package, resource) as fp:
return fp.read()
def read_text(package: Package,
resource: Resource,
encoding: str = 'utf-8',
errors: str = 'strict') -> str:
"""Return the decoded string of the resource.
The decoding-related arguments have the same semantics as those of
bytes.decode().
"""
with open_text(package, resource, encoding, errors) as fp:
return fp.read()
def files(package: Package) -> resources_abc.Traversable:
"""
Get a Traversable resource from a package
"""
return _common.from_package(_get_package(package))
def path(
package: Package, resource: Resource,
) -> 'ContextManager[Path]':
"""A context manager providing a file path object to the resource.
If the resource does not already exist on its own on the file system,
a temporary file will be created. If the file was created, the file
will be deleted upon exiting the context manager (no exception is
raised if the file was deleted prior to the context manager
exiting).
"""
reader = _get_resource_reader(_get_package(package))
return (
_path_from_reader(reader, resource)
if reader else
_common.as_file(files(package).joinpath(_normalize_path(resource)))
)
@contextmanager
def _path_from_reader(reader, resource):
norm_resource = _normalize_path(resource)
with suppress(FileNotFoundError):
yield Path(reader.resource_path(norm_resource))
return
opener_reader = reader.open_resource(norm_resource)
with _common._tempfile(opener_reader.read, suffix=norm_resource) as res:
yield res
def is_resource(package: Package, name: str) -> bool:
"""True if 'name' is a resource inside 'package'.
Directories are *not* resources.
"""
package = _get_package(package)
_normalize_path(name)
reader = _get_resource_reader(package)
if reader is not None:
return reader.is_resource(name)
package_contents = set(contents(package))
if name not in package_contents:
return False
return (_common.from_package(package) / name).is_file()
def contents(package: Package) -> Iterable[str]:
"""Return an iterable of entries in 'package'.
Note that not all entries are resources. Specifically, directories are
not considered resources. Use `is_resource()` on each entry returned here
to check if it is a resource or not.
"""
package = _get_package(package)
reader = _get_resource_reader(package)
if reader is not None:
return reader.contents()
# Is the package a namespace package? By definition, namespace packages
# cannot have resources.
namespace = (
package.__spec__.origin is None or
package.__spec__.origin == 'namespace'
)
if namespace or not package.__spec__.has_location:
return ()
return list(item.name for item in _common.from_package(package).iterdir())
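# Editor's note: a sketch of the reader-facing API above; 'mypkg', 'config.ini' and
# 'logo.png' are hypothetical, and the package must be a regular (non-namespace) package.
from importlib import resources

text = resources.read_text('mypkg', 'config.ini')        # decoded as utf-8 by default
if resources.is_resource('mypkg', 'logo.png'):
    with resources.path('mypkg', 'logo.png') as p:       # temporary file if the package is zipped
        data = p.read_bytes()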


@@ -0,0 +1,300 @@
"""Utility code for constructing importers, etc."""
from . import abc
from ._bootstrap import module_from_spec
from ._bootstrap import _resolve_name
from ._bootstrap import spec_from_loader
from ._bootstrap import _find_spec
from ._bootstrap_external import MAGIC_NUMBER
from ._bootstrap_external import _RAW_MAGIC_NUMBER
from ._bootstrap_external import cache_from_source
from ._bootstrap_external import decode_source
from ._bootstrap_external import source_from_cache
from ._bootstrap_external import spec_from_file_location
from contextlib import contextmanager
import _imp
import functools
import sys
import types
import warnings
def source_hash(source_bytes):
"Return the hash of *source_bytes* as used in hash-based pyc files."
return _imp.source_hash(_RAW_MAGIC_NUMBER, source_bytes)
def resolve_name(name, package):
"""Resolve a relative module name to an absolute one."""
if not name.startswith('.'):
return name
elif not package:
raise ImportError(f'no package specified for {repr(name)} '
'(required for relative module names)')
level = 0
for character in name:
if character != '.':
break
level += 1
return _resolve_name(name[level:], package, level)
def _find_spec_from_path(name, path=None):
"""Return the spec for the specified module.
First, sys.modules is checked to see if the module was already imported. If
so, then sys.modules[name].__spec__ is returned. If that happens to be
set to None, then ValueError is raised. If the module is not in
sys.modules, then sys.meta_path is searched for a suitable spec with the
value of 'path' given to the finders. None is returned if no spec could
be found.
Dotted names do not have their parent packages implicitly imported. You will
most likely need to explicitly import all parent packages in the proper
order for a submodule to get the correct spec.
"""
if name not in sys.modules:
return _find_spec(name, path)
else:
module = sys.modules[name]
if module is None:
return None
try:
spec = module.__spec__
except AttributeError:
raise ValueError('{}.__spec__ is not set'.format(name)) from None
else:
if spec is None:
raise ValueError('{}.__spec__ is None'.format(name))
return spec
def find_spec(name, package=None):
"""Return the spec for the specified module.
First, sys.modules is checked to see if the module was already imported. If
so, then sys.modules[name].__spec__ is returned. If that happens to be
set to None, then ValueError is raised. If the module is not in
sys.modules, then sys.meta_path is searched for a suitable spec with the
value of 'path' given to the finders. None is returned if no spec could
be found.
If the name is for submodule (contains a dot), the parent module is
automatically imported.
The name and package arguments work the same as importlib.import_module().
In other words, relative module names (with leading dots) work.
"""
fullname = resolve_name(name, package) if name.startswith('.') else name
if fullname not in sys.modules:
parent_name = fullname.rpartition('.')[0]
if parent_name:
parent = __import__(parent_name, fromlist=['__path__'])
try:
parent_path = parent.__path__
except AttributeError as e:
raise ModuleNotFoundError(
f"__path__ attribute not found on {parent_name!r} "
f"while trying to find {fullname!r}", name=fullname) from e
else:
parent_path = None
return _find_spec(fullname, parent_path)
else:
module = sys.modules[fullname]
if module is None:
return None
try:
spec = module.__spec__
except AttributeError:
raise ValueError('{}.__spec__ is not set'.format(name)) from None
else:
if spec is None:
raise ValueError('{}.__spec__ is None'.format(name))
return spec
@contextmanager
def _module_to_load(name):
is_reload = name in sys.modules
module = sys.modules.get(name)
if not is_reload:
# This must be done before open() is called as the 'io' module
# implicitly imports 'locale' and would otherwise trigger an
# infinite loop.
module = type(sys)(name)
# This must be done before putting the module in sys.modules
# (otherwise an optimization shortcut in import.c becomes wrong)
module.__initializing__ = True
sys.modules[name] = module
try:
yield module
except Exception:
if not is_reload:
try:
del sys.modules[name]
except KeyError:
pass
finally:
module.__initializing__ = False
def set_package(fxn):
"""Set __package__ on the returned module.
This function is deprecated.
"""
@functools.wraps(fxn)
def set_package_wrapper(*args, **kwargs):
warnings.warn('The import system now takes care of this automatically.',
DeprecationWarning, stacklevel=2)
module = fxn(*args, **kwargs)
if getattr(module, '__package__', None) is None:
module.__package__ = module.__name__
if not hasattr(module, '__path__'):
module.__package__ = module.__package__.rpartition('.')[0]
return module
return set_package_wrapper
def set_loader(fxn):
"""Set __loader__ on the returned module.
This function is deprecated.
"""
@functools.wraps(fxn)
def set_loader_wrapper(self, *args, **kwargs):
warnings.warn('The import system now takes care of this automatically.',
DeprecationWarning, stacklevel=2)
module = fxn(self, *args, **kwargs)
if getattr(module, '__loader__', None) is None:
module.__loader__ = self
return module
return set_loader_wrapper
def module_for_loader(fxn):
"""Decorator to handle selecting the proper module for loaders.
The decorated function is passed the module to use instead of the module
name. The module passed in to the function is either from sys.modules if
it already exists or is a new module. If the module is new, then __name__
is set to the first argument to the method, __loader__ is set to self, and
__package__ is set accordingly (if self.is_package() is defined) before the
module is passed to the decorated function (if self.is_package() does
not work for the module, __package__ is set post-load).
If an exception is raised and the decorator created the module it is
subsequently removed from sys.modules.
The decorator assumes that the decorated function takes the module name as
the second argument.
"""
warnings.warn('The import system now takes care of this automatically.',
DeprecationWarning, stacklevel=2)
@functools.wraps(fxn)
def module_for_loader_wrapper(self, fullname, *args, **kwargs):
with _module_to_load(fullname) as module:
module.__loader__ = self
try:
is_package = self.is_package(fullname)
except (ImportError, AttributeError):
pass
else:
if is_package:
module.__package__ = fullname
else:
module.__package__ = fullname.rpartition('.')[0]
# If __package__ was not set above, __import__() will do it later.
return fxn(self, module, *args, **kwargs)
return module_for_loader_wrapper
class _LazyModule(types.ModuleType):
"""A subclass of the module type which triggers loading upon attribute access."""
def __getattribute__(self, attr):
"""Trigger the load of the module and return the attribute."""
# All module metadata must be garnered from __spec__ in order to avoid
# using mutated values.
# Stop triggering this method.
self.__class__ = types.ModuleType
# Get the original name to make sure no object substitution occurred
# in sys.modules.
original_name = self.__spec__.name
# Figure out exactly what attributes were mutated between the creation
# of the module and now.
attrs_then = self.__spec__.loader_state['__dict__']
original_type = self.__spec__.loader_state['__class__']
attrs_now = self.__dict__
attrs_updated = {}
for key, value in attrs_now.items():
# Code that set the attribute may have kept a reference to the
# assigned object, making identity more important than equality.
if key not in attrs_then:
attrs_updated[key] = value
elif id(attrs_now[key]) != id(attrs_then[key]):
attrs_updated[key] = value
self.__spec__.loader.exec_module(self)
# If exec_module() was used directly there is no guarantee the module
# object was put into sys.modules.
if original_name in sys.modules:
if id(self) != id(sys.modules[original_name]):
raise ValueError(f"module object for {original_name!r} "
"substituted in sys.modules during a lazy "
"load")
# Update after loading since that's what would happen in an eager
# loading situation.
self.__dict__.update(attrs_updated)
return getattr(self, attr)
def __delattr__(self, attr):
"""Trigger the load and then perform the deletion."""
# To trigger the load and raise an exception if the attribute
# doesn't exist.
self.__getattribute__(attr)
delattr(self, attr)
class LazyLoader(abc.Loader):
"""A loader that creates a module which defers loading until attribute access."""
@staticmethod
def __check_eager_loader(loader):
if not hasattr(loader, 'exec_module'):
raise TypeError('loader must define exec_module()')
@classmethod
def factory(cls, loader):
"""Construct a callable which returns the eager loader made lazy."""
cls.__check_eager_loader(loader)
return lambda *args, **kwargs: cls(loader(*args, **kwargs))
def __init__(self, loader):
self.__check_eager_loader(loader)
self.loader = loader
def create_module(self, spec):
return self.loader.create_module(spec)
def exec_module(self, module):
"""Make the module load lazily."""
module.__spec__.loader = self.loader
module.__loader__ = self.loader
# Don't need to worry about deep-copying as trying to set an attribute
# on an object would have triggered the load,
# e.g. ``module.__spec__.loader = None`` would trigger a load from
# trying to access module.__spec__.
loader_state = {}
loader_state['__dict__'] = module.__dict__.copy()
loader_state['__class__'] = module.__class__
module.__spec__.loader_state = loader_state
module.__class__ = _LazyModule
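# Editor's note: a sketch of the intended LazyLoader pattern, following the documented
# recipe; 'json' is only an illustrative module name.
import importlib.util
import sys

def lazy_import(name):
    # Hypothetical helper: build a module whose execution is deferred.
    spec = importlib.util.find_spec(name)
    spec.loader = importlib.util.LazyLoader(spec.loader)
    module = importlib.util.module_from_spec(spec)
    sys.modules[name] = module
    spec.loader.exec_module(module)   # records state; real execution happens on first attribute access
    return module

json = lazy_import('json')   # nothing has been executed yet
json.dumps({})               # first attribute access triggers the actual load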