first commit
This commit is contained in:
		| @@ -0,0 +1,35 @@ | ||||
| """For backward compatibility, expose main functions from | ||||
| ``setuptools.config.setupcfg`` | ||||
| """ | ||||
| import warnings | ||||
| from functools import wraps | ||||
| from textwrap import dedent | ||||
| from typing import Callable, TypeVar, cast | ||||
|  | ||||
| from .._deprecation_warning import SetuptoolsDeprecationWarning | ||||
| from . import setupcfg | ||||
|  | ||||
| Fn = TypeVar("Fn", bound=Callable) | ||||
|  | ||||
| __all__ = ('parse_configuration', 'read_configuration') | ||||
|  | ||||
|  | ||||
def _deprecation_notice(fn: Fn) -> Fn:
    """Decorate *fn* so every call emits a ``SetuptoolsDeprecationWarning``
    pointing users at the ``setuptools.config.setupcfg`` replacement.
    """
    @wraps(fn)
    def _wrapper(*args, **kwargs):
        # Message is built at call time so it can reference the wrapped
        # function's name and the modules involved.
        msg = f"""\
        As setuptools moves its configuration towards `pyproject.toml`,
        `{__name__}.{fn.__name__}` became deprecated.

        For the time being, you can use the `{setupcfg.__name__}` module
        to access a backward compatible API, but this module is provisional
        and might be removed in the future.
        """
        # stacklevel=2 => the warning points at the caller, not this wrapper
        warnings.warn(dedent(msg), SetuptoolsDeprecationWarning, stacklevel=2)
        return fn(*args, **kwargs)

    return cast(Fn, _wrapper)
|  | ||||
|  | ||||
# Deprecated re-exports: keep the historical public API importable while
# steering users towards ``setuptools.config.setupcfg``.
read_configuration = _deprecation_notice(setupcfg.read_configuration)
parse_configuration = _deprecation_notice(setupcfg.parse_configuration)
										
											Binary file not shown.
										
									
								
							
										
											Binary file not shown.
										
									
								
							
										
											Binary file not shown.
										
									
								
							
										
											Binary file not shown.
										
									
								
							
										
											Binary file not shown.
										
									
								
							| @@ -0,0 +1,384 @@ | ||||
| """Translation layer between pyproject config and setuptools distribution and | ||||
| metadata objects. | ||||
|  | ||||
| The distribution and metadata objects are modeled after (an old version of) | ||||
| core metadata, therefore configs in the format specified for ``pyproject.toml`` | ||||
| need to be processed before being applied. | ||||
|  | ||||
| **PRIVATE MODULE**: API reserved for setuptools internal usage only. | ||||
| """ | ||||
| import logging | ||||
| import os | ||||
| import warnings | ||||
| from collections.abc import Mapping | ||||
| from email.headerregistry import Address | ||||
| from functools import partial, reduce | ||||
| from itertools import chain | ||||
| from types import MappingProxyType | ||||
| from typing import (TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple, | ||||
|                     Type, Union, cast) | ||||
|  | ||||
| from setuptools._deprecation_warning import SetuptoolsDeprecationWarning | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from setuptools._importlib import metadata  # noqa | ||||
|     from setuptools.dist import Distribution  # noqa | ||||
|  | ||||
| EMPTY: Mapping = MappingProxyType({})  # Immutable dict-like | ||||
| _Path = Union[os.PathLike, str] | ||||
| _DictOrStr = Union[dict, str] | ||||
| _CorrespFn = Callable[["Distribution", Any, _Path], None] | ||||
| _Correspondence = Union[str, _CorrespFn] | ||||
|  | ||||
| _logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
def apply(dist: "Distribution", config: dict, filename: _Path) -> "Distribution":
    """Apply configuration dict read with :func:`read_configuration`"""

    if not config:
        return dist  # short-circuit unrelated pyproject.toml file

    root_dir = os.path.dirname(filename) or "."

    _apply_project_table(dist, config, root_dir)
    _apply_tool_table(dist, config, filename)

    current_directory = os.getcwd()
    os.chdir(root_dir)
    try:
        # The finalization hooks may resolve paths relative to the project
        # root, hence the temporary ``chdir`` (restored even on error).
        dist._finalize_requires()
        dist._finalize_license_files()
    finally:
        os.chdir(current_directory)

    return dist
|  | ||||
|  | ||||
def _apply_project_table(dist: "Distribution", config: dict, root_dir: _Path):
    """Apply the ``[project]`` table of ``pyproject.toml`` onto ``dist``."""
    # Copy because the helpers below mutate the table in place.
    project_table = config.get("project", {}).copy()
    if not project_table:
        return  # short-circuit

    # Warn about previously-set metadata the spec says must be ignored.
    _handle_missing_dynamic(dist, project_table)
    # Fold ``scripts``/``gui-scripts`` into a single ``entry-points`` table.
    _unify_entry_points(project_table)

    for field, value in project_table.items():
        norm_key = json_compatible_key(field)
        # Fields needing a non-trivial translation have a handler registered
        # in PYPROJECT_CORRESPONDENCE; everything else maps 1:1 by name.
        corresp = PYPROJECT_CORRESPONDENCE.get(norm_key, norm_key)
        if callable(corresp):
            corresp(dist, value, root_dir)
        else:
            _set_config(dist, corresp, value)
|  | ||||
|  | ||||
def _apply_tool_table(dist: "Distribution", config: dict, filename: _Path):
    """Apply the ``[tool.setuptools]`` table of ``pyproject.toml`` onto ``dist``."""
    tool_table = config.get("tool", {}).get("setuptools", {})
    if not tool_table:
        return  # short-circuit

    for field, value in tool_table.items():
        norm_key = json_compatible_key(field)

        if norm_key in TOOL_TABLE_DEPRECATIONS:
            suggestion = TOOL_TABLE_DEPRECATIONS[norm_key]
            msg = f"The parameter `{norm_key}` is deprecated, {suggestion}"
            warnings.warn(msg, SetuptoolsDeprecationWarning)

        # Some options carry a different name in the pyproject table.
        norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key)
        _set_config(dist, norm_key, value)

    # ``[tool.distutils.*]`` tables become per-command options.
    _copy_command_options(config, dist, filename)
|  | ||||
|  | ||||
def _handle_missing_dynamic(dist: "Distribution", project_table: dict):
    """Be temporarily forgiving with ``dynamic`` fields not listed in ``dynamic``"""
    # TODO: Set fields back to `None` once the feature stabilizes
    dynamic = set(project_table.get("dynamic", []))
    for field, getter in _PREVIOUSLY_DEFINED.items():
        if not (field in project_table or field in dynamic):
            # Field is absent from pyproject.toml and not declared dynamic,
            # yet another config source (setup.py/setup.cfg) already set it.
            # Per the spec it should be ignored; warn before that happens.
            value = getter(dist)
            if value:
                msg = _WouldIgnoreField.message(field, value)
                warnings.warn(msg, _WouldIgnoreField)
|  | ||||
|  | ||||
def json_compatible_key(key: str) -> str:
    """As defined in :pep:`566#json-compatible-metadata`"""
    lowered = key.lower()
    return lowered.replace("-", "_")
|  | ||||
|  | ||||
def _set_config(dist: "Distribution", field: str, value: Any):
    """Store ``value`` on ``dist``, preferring the most specific target:
    a ``set_<field>`` method, then a metadata attribute, then ``dist`` itself.
    """
    setter = getattr(dist.metadata, f"set_{field}", None)
    if setter:
        setter(value)
    elif hasattr(dist.metadata, field) or field in SETUPTOOLS_PATCHES:
        # SETUPTOOLS_PATCHES lists metadata attrs setuptools adds on top of
        # distutils, which a plain ``hasattr`` check may miss.
        setattr(dist.metadata, field, value)
    else:
        setattr(dist, field, value)
|  | ||||
|  | ||||
| _CONTENT_TYPES = { | ||||
|     ".md": "text/markdown", | ||||
|     ".rst": "text/x-rst", | ||||
|     ".txt": "text/plain", | ||||
| } | ||||
|  | ||||
|  | ||||
| def _guess_content_type(file: str) -> Optional[str]: | ||||
|     _, ext = os.path.splitext(file.lower()) | ||||
|     if not ext: | ||||
|         return None | ||||
|  | ||||
|     if ext in _CONTENT_TYPES: | ||||
|         return _CONTENT_TYPES[ext] | ||||
|  | ||||
|     valid = ", ".join(f"{k} ({v})" for k, v in _CONTENT_TYPES.items()) | ||||
|     msg = f"only the following file extensions are recognized: {valid}." | ||||
|     raise ValueError(f"Undefined content type for {file}, {msg}") | ||||
|  | ||||
|  | ||||
def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: _Path):
    """Translate the ``readme`` field into ``long_description`` + content type."""
    from setuptools.config import expand

    if isinstance(val, str):
        # Plain string => path to the README; content type guessed by extension
        file: Union[str, list] = val
        text = expand.read_files(file, root_dir)
        ctype = _guess_content_type(val)
    else:
        # Table form => inline ``text`` or a ``file`` path, with an explicit
        # (mandatory per the spec) ``content-type``
        file = val.get("file") or []
        text = val.get("text") or expand.read_files(file, root_dir)
        ctype = val["content-type"]

    _set_config(dist, "long_description", text)

    if ctype:
        _set_config(dist, "long_description_content_type", ctype)

    if file:
        # Track the file so it can be included in sdists
        dist._referenced_files.add(cast(str, file))
|  | ||||
|  | ||||
def _license(dist: "Distribution", val: dict, root_dir: _Path):
    """Translate the ``license`` table (either ``file`` or ``text`` form)."""
    from setuptools.config import expand

    if "file" in val:
        _set_config(dist, "license", expand.read_files([val["file"]], root_dir))
        # Track the file so it can be included in sdists
        dist._referenced_files.add(val["file"])
    else:
        _set_config(dist, "license", val["text"])
|  | ||||
|  | ||||
def _people(dist: "Distribution", val: List[dict], _root_dir: _Path, kind: str):
    """Translate an ``authors``/``maintainers`` list of tables into the two
    comma-separated core-metadata strings (``<kind>`` and ``<kind>_email``).
    """
    names = []
    emails = []
    for person in val:
        has_name = "name" in person
        has_email = "email" in person
        if not has_name:
            emails.append(person["email"])
        elif not has_email:
            names.append(person["name"])
        else:
            # Both present: render as a single RFC 5322 address
            addr = Address(display_name=person["name"], addr_spec=person["email"])
            emails.append(str(addr))

    if names:
        _set_config(dist, kind, ", ".join(names))
    if emails:
        _set_config(dist, f"{kind}_email", ", ".join(emails))
|  | ||||
|  | ||||
def _project_urls(dist: "Distribution", val: dict, _root_dir):
    """Store the ``[project.urls]`` table verbatim as ``project_urls``."""
    _set_config(dist, "project_urls", val)
|  | ||||
|  | ||||
def _python_requires(dist: "Distribution", val: dict, _root_dir):
    """Parse ``requires-python`` into a ``SpecifierSet`` before storing it."""
    from setuptools.extern.packaging.specifiers import SpecifierSet

    _set_config(dist, "python_requires", SpecifierSet(val))
|  | ||||
|  | ||||
def _dependencies(dist: "Distribution", val: list, _root_dir):
    """Set ``install_requires``, warning when a previous value is overwritten."""
    if getattr(dist, "install_requires", []):
        msg = "`install_requires` overwritten in `pyproject.toml` (dependencies)"
        warnings.warn(msg)
    _set_config(dist, "install_requires", val)
|  | ||||
|  | ||||
def _optional_dependencies(dist: "Distribution", val: dict, _root_dir):
    """Merge ``[project.optional-dependencies]`` into any pre-existing extras
    (pyproject entries win on key collisions).
    """
    merged = dict(getattr(dist, "extras_require", {}))
    merged.update(val)
    _set_config(dist, "extras_require", merged)
|  | ||||
|  | ||||
| def _unify_entry_points(project_table: dict): | ||||
|     project = project_table | ||||
|     entry_points = project.pop("entry-points", project.pop("entry_points", {})) | ||||
|     renaming = {"scripts": "console_scripts", "gui_scripts": "gui_scripts"} | ||||
|     for key, value in list(project.items()):  # eager to allow modifications | ||||
|         norm_key = json_compatible_key(key) | ||||
|         if norm_key in renaming and value: | ||||
|             entry_points[renaming[norm_key]] = project.pop(key) | ||||
|  | ||||
|     if entry_points: | ||||
|         project["entry-points"] = { | ||||
|             name: [f"{k} = {v}" for k, v in group.items()] | ||||
|             for name, group in entry_points.items() | ||||
|         } | ||||
|  | ||||
|  | ||||
def _copy_command_options(pyproject: dict, dist: "Distribution", filename: _Path):
    """Copy ``[tool.distutils.<cmd>]`` tables into ``dist.command_options``."""
    tool_table = pyproject.get("tool", {})
    cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {})
    valid_options = _valid_command_options(cmdclass)

    cmd_opts = dist.command_options
    for cmd, config in pyproject.get("tool", {}).get("distutils", {}).items():
        cmd = json_compatible_key(cmd)
        valid = valid_options.get(cmd, set())
        cmd_opts.setdefault(cmd, {})
        for key, value in config.items():
            key = json_compatible_key(key)
            # distutils convention: each option value is an (origin, value) pair
            cmd_opts[cmd][key] = (str(filename), value)
            if key not in valid:
                # To avoid removing options that are specified dynamically we
                # just log a warn...
                _logger.warning(f"Command option {cmd}.{key} is not defined")
|  | ||||
|  | ||||
def _valid_command_options(cmdclass: Mapping = EMPTY) -> Dict[str, Set[str]]:
    """Map each known command name to the set of option names it accepts."""
    from .._importlib import metadata
    from setuptools.dist import Distribution

    valid_options = {"global": _normalise_cmd_options(Distribution.global_options)}

    # Commands come both from installed plugins (entry-points) and from the
    # user-provided ``cmdclass`` table.
    unloaded_entry_points = metadata.entry_points(group='distutils.commands')
    loaded_entry_points = (_load_ep(ep) for ep in unloaded_entry_points)
    entry_points = (ep for ep in loaded_entry_points if ep)  # drop load failures
    for cmd, cmd_class in chain(entry_points, cmdclass.items()):
        opts = valid_options.get(cmd, set())
        opts = opts | _normalise_cmd_options(getattr(cmd_class, "user_options", []))
        valid_options[cmd] = opts

    return valid_options
|  | ||||
|  | ||||
def _load_ep(ep: "metadata.EntryPoint") -> Optional[Tuple[str, Type]]:
    """Load an entry-point, returning ``(name, class)`` or ``None`` on failure."""
    # Ignore all the errors
    try:
        return (ep.name, ep.load())
    except Exception as ex:
        # A broken third-party plugin should not break config parsing;
        # surface the problem in the log instead.
        msg = f"{ex.__class__.__name__} while trying to load entry-point {ep.name}"
        _logger.warning(f"{msg}: {ex}")
        return None
|  | ||||
|  | ||||
| def _normalise_cmd_option_key(name: str) -> str: | ||||
|     return json_compatible_key(name).strip("_=") | ||||
|  | ||||
|  | ||||
def _normalise_cmd_options(desc: List[Tuple[str, Optional[str], str]]) -> Set[str]:
    """Extract normalised option names from a distutils ``user_options`` list
    (each entry is ``(long_name, short_name, help)``).
    """
    return {_normalise_cmd_option_key(fancy_option[0]) for fancy_option in desc}
|  | ||||
|  | ||||
| def _attrgetter(attr): | ||||
|     """ | ||||
|     Similar to ``operator.attrgetter`` but returns None if ``attr`` is not found | ||||
|     >>> from types import SimpleNamespace | ||||
|     >>> obj = SimpleNamespace(a=42, b=SimpleNamespace(c=13)) | ||||
|     >>> _attrgetter("a")(obj) | ||||
|     42 | ||||
|     >>> _attrgetter("b.c")(obj) | ||||
|     13 | ||||
|     >>> _attrgetter("d")(obj) is None | ||||
|     True | ||||
|     """ | ||||
|     return partial(reduce, lambda acc, x: getattr(acc, x, None), attr.split(".")) | ||||
|  | ||||
|  | ||||
def _some_attrgetter(*items):
    """
    Return the first "truth-y" attribute or None
    >>> from types import SimpleNamespace
    >>> obj = SimpleNamespace(a=42, b=SimpleNamespace(c=13))
    >>> _some_attrgetter("d", "a", "b.c")(obj)
    42
    >>> _some_attrgetter("d", "e", "b.c", "a")(obj)
    13
    >>> _some_attrgetter("d", "e", "f")(obj) is None
    True
    """
    def _acessor(obj):
        # Probe each dotted path in order and stop at the first non-None hit
        for name in items:
            value = _attrgetter(name)(obj)
            if value is not None:
                return value
        return None

    return _acessor
|  | ||||
|  | ||||
# ``[project]`` fields that need more than a plain attribute assignment;
# everything not listed here maps 1:1 by (normalised) name.
PYPROJECT_CORRESPONDENCE: Dict[str, _Correspondence] = {
    "readme": _long_description,
    "license": _license,
    "authors": partial(_people, kind="author"),
    "maintainers": partial(_people, kind="maintainer"),
    "urls": _project_urls,
    "dependencies": _dependencies,
    "optional_dependencies": _optional_dependencies,
    "requires_python": _python_requires,
}

# ``[tool.setuptools]`` keys whose setuptools attribute has a different name.
TOOL_TABLE_RENAMES = {"script_files": "scripts"}
# Deprecated ``[tool.setuptools]`` keys => suggestion appended to the warning.
TOOL_TABLE_DEPRECATIONS = {
    "namespace_packages": "consider using implicit namespaces instead (PEP 420)."
}
|  | ||||
# Metadata attributes added by setuptools on top of distutils; used by
# ``_set_config`` to route them to ``dist.metadata``.
SETUPTOOLS_PATCHES = {"long_description_content_type", "project_urls",
                      "provides_extras", "license_file", "license_files"}

# Accessors for values that other config sources (setup.py / setup.cfg) may
# have already defined; used by ``_handle_missing_dynamic`` to warn when the
# ``[project]`` table would shadow them.
_PREVIOUSLY_DEFINED = {
    "name": _attrgetter("metadata.name"),
    "version": _attrgetter("metadata.version"),
    "description": _attrgetter("metadata.description"),
    "readme": _attrgetter("metadata.long_description"),
    "requires-python": _some_attrgetter("python_requires", "metadata.python_requires"),
    "license": _attrgetter("metadata.license"),
    "authors": _some_attrgetter("metadata.author", "metadata.author_email"),
    "maintainers": _some_attrgetter("metadata.maintainer", "metadata.maintainer_email"),
    "keywords": _attrgetter("metadata.keywords"),
    "classifiers": _attrgetter("metadata.classifiers"),
    "urls": _attrgetter("metadata.project_urls"),
    "entry-points": _attrgetter("entry_points"),
    "dependencies": _some_attrgetter("_orig_install_requires", "install_requires"),
    "optional-dependencies": _some_attrgetter("_orig_extras_require", "extras_require"),
}
|  | ||||
|  | ||||
class _WouldIgnoreField(UserWarning):
    """Inform users that ``pyproject.toml`` would overwrite previous metadata."""

    # Template rendered by :meth:`message`; indentation is stripped with
    # ``inspect.cleandoc`` before display.
    MESSAGE = """\
    {field!r} defined outside of `pyproject.toml` would be ignored.
    !!\n\n
    ##########################################################################
    # configuration would be ignored/result in error due to `pyproject.toml` #
    ##########################################################################

    The following seems to be defined outside of `pyproject.toml`:

    `{field} = {value!r}`

    According to the spec (see the link below), however, setuptools CANNOT
    consider this value unless {field!r} is listed as `dynamic`.

    https://packaging.python.org/en/latest/specifications/declaring-project-metadata/

    For the time being, `setuptools` will still consider the given value (as a
    **transitional** measure), but please note that future releases of setuptools will
    follow strictly the standard.

    To prevent this warning, you can list {field!r} under `dynamic` or alternatively
    remove the `[project]` table from your file and rely entirely on other means of
    configuration.
    \n\n!!
    """

    @classmethod
    def message(cls, field, value):
        """Render the warning text for the given field/value pair."""
        from inspect import cleandoc
        return cleandoc(cls.MESSAGE.format(field=field, value=value))
| @@ -0,0 +1,34 @@ | ||||
| from functools import reduce | ||||
| from typing import Any, Callable, Dict | ||||
|  | ||||
| from . import formats | ||||
| from .error_reporting import detailed_errors, ValidationError | ||||
| from .extra_validations import EXTRA_VALIDATIONS | ||||
| from .fastjsonschema_exceptions import JsonSchemaException, JsonSchemaValueException | ||||
| from .fastjsonschema_validations import validate as _validate | ||||
|  | ||||
| __all__ = [ | ||||
|     "validate", | ||||
|     "FORMAT_FUNCTIONS", | ||||
|     "EXTRA_VALIDATIONS", | ||||
|     "ValidationError", | ||||
|     "JsonSchemaException", | ||||
|     "JsonSchemaValueException", | ||||
| ] | ||||
|  | ||||
|  | ||||
# Custom string-format validators keyed by their hyphenated format name,
# discovered from the public callables of the ``formats`` module.
FORMAT_FUNCTIONS: Dict[str, Callable[[str], bool]] = {
    fn.__name__.replace("_", "-"): fn
    for fn in formats.__dict__.values()
    if callable(fn) and not fn.__name__.startswith("_")
}
|  | ||||
|  | ||||
def validate(data: Any) -> bool:
    """Validate the given ``data`` object using JSON Schema
    This function raises ``ValidationError`` if ``data`` is invalid.
    """
    with detailed_errors():
        _validate(data, custom_formats=FORMAT_FUNCTIONS)
    # Extra checks not expressible in JSON Schema; each validator raises on
    # failure and returns the (possibly unchanged) data object.
    reduce(lambda acc, fn: fn(acc), EXTRA_VALIDATIONS, data)
    return True
										
											Binary file not shown.
										
									
								
							
										
											Binary file not shown.
										
									
								
							
										
											Binary file not shown.
										
									
								
							
										
											Binary file not shown.
										
									
								
							
										
											Binary file not shown.
										
									
								
							
										
											Binary file not shown.
										
									
								
							| @@ -0,0 +1,318 @@ | ||||
| import io | ||||
| import json | ||||
| import logging | ||||
| import os | ||||
| import re | ||||
| from contextlib import contextmanager | ||||
| from textwrap import indent, wrap | ||||
| from typing import Any, Dict, Iterator, List, Optional, Sequence, Union, cast | ||||
|  | ||||
| from .fastjsonschema_exceptions import JsonSchemaValueException | ||||
|  | ||||
| _logger = logging.getLogger(__name__) | ||||
|  | ||||
| _MESSAGE_REPLACEMENTS = { | ||||
|     "must be named by propertyName definition": "keys must be named by", | ||||
|     "one of contains definition": "at least one item that matches", | ||||
|     " same as const definition:": "", | ||||
|     "only specified items": "only items matching the definition", | ||||
| } | ||||
|  | ||||
| _SKIP_DETAILS = ( | ||||
|     "must not be empty", | ||||
|     "is always invalid", | ||||
|     "must not be there", | ||||
| ) | ||||
|  | ||||
| _NEED_DETAILS = {"anyOf", "oneOf", "anyOf", "contains", "propertyNames", "not", "items"} | ||||
|  | ||||
| _CAMEL_CASE_SPLITTER = re.compile(r"\W+|([A-Z][^A-Z\W]*)") | ||||
| _IDENTIFIER = re.compile(r"^[\w_]+$", re.I) | ||||
|  | ||||
| _TOML_JARGON = { | ||||
|     "object": "table", | ||||
|     "property": "key", | ||||
|     "properties": "keys", | ||||
|     "property names": "keys", | ||||
| } | ||||
|  | ||||
|  | ||||
class ValidationError(JsonSchemaValueException):
    """Report violations of a given JSON schema.

    This class extends :exc:`~fastjsonschema.JsonSchemaValueException`
    by adding the following properties:

    - ``summary``: an improved version of the ``JsonSchemaValueException`` error message
      with only the necessary information)

    - ``details``: more contextual information about the error like the failing schema
      itself and the value that violates the schema.

    Depending on the level of the verbosity of the ``logging`` configuration
    the exception message will be only ``summary`` (default) or a combination of
    ``summary`` and ``details`` (when the logging level is set to :obj:`logging.DEBUG`).
    """

    # Class-level defaults; instances get real values in ``_from_jsonschema``.
    summary = ""
    details = ""
    _original_message = ""

    @classmethod
    def _from_jsonschema(cls, ex: JsonSchemaValueException):
        """Build a ``ValidationError`` from a raw fastjsonschema exception."""
        formatter = _ErrorFormatting(ex)
        obj = cls(str(formatter), ex.value, formatter.name, ex.definition, ex.rule)
        debug_code = os.getenv("JSONSCHEMA_DEBUG_CODE_GENERATION", "false").lower()
        if debug_code != "false":  # pragma: no cover
            # Preserve the original traceback when debugging generated code
            obj.__cause__, obj.__traceback__ = ex.__cause__, ex.__traceback__
        obj._original_message = ex.message
        obj.summary = formatter.summary
        obj.details = formatter.details
        return obj
|  | ||||
|  | ||||
@contextmanager
def detailed_errors():
    """Re-raise fastjsonschema errors as the richer :class:`ValidationError`."""
    try:
        yield
    except JsonSchemaValueException as ex:
        # ``from None``: the original exception adds noise, not information
        raise ValidationError._from_jsonschema(ex) from None
|  | ||||
|  | ||||
class _ErrorFormatting:
    """Lazily derive the ``summary``/``details`` strings for a schema error."""

    def __init__(self, ex: JsonSchemaValueException):
        self.ex = ex
        self.name = f"`{self._simplify_name(ex.name)}`"
        self._original_message = self.ex.message.replace(ex.name, self.name)
        # Caches filled on first access of the properties below
        self._summary = ""
        self._details = ""

    def __str__(self) -> str:
        # Details are only shown when logging verbosity asks for them
        if _logger.getEffectiveLevel() <= logging.DEBUG and self.details:
            return f"{self.summary}\n\n{self.details}"

        return self.summary

    @property
    def summary(self) -> str:
        if not self._summary:
            self._summary = self._expand_summary()

        return self._summary

    @property
    def details(self) -> str:
        if not self._details:
            self._details = self._expand_details()

        return self._details

    def _simplify_name(self, name):
        # fastjsonschema names the validated root "data"; drop that prefix
        x = len("data.")
        return name[x:] if name.startswith("data.") else name

    def _expand_summary(self):
        """Clean up the raw message, appending schema info when required."""
        msg = self._original_message

        for bad, repl in _MESSAGE_REPLACEMENTS.items():
            msg = msg.replace(bad, repl)

        if any(substring in msg for substring in _SKIP_DETAILS):
            return msg

        schema = self.ex.rule_definition
        if self.ex.rule in _NEED_DETAILS and schema:
            # Render the offending schema as a human-readable outline
            summary = _SummaryWriter(_TOML_JARGON)
            return f"{msg}:\n\n{indent(summary(schema), '    ')}"

        return msg

    def _expand_details(self) -> str:
        """Render description, offending value, rule and schema definition."""
        optional = []
        # ``$$description`` is a list-of-lines convention used by the schemas
        desc_lines = self.ex.definition.pop("$$description", [])
        desc = self.ex.definition.pop("description", None) or " ".join(desc_lines)
        if desc:
            description = "\n".join(
                wrap(
                    desc,
                    width=80,
                    initial_indent="    ",
                    subsequent_indent="    ",
                    break_long_words=False,
                )
            )
            optional.append(f"DESCRIPTION:\n{description}")
        schema = json.dumps(self.ex.definition, indent=4)
        value = json.dumps(self.ex.value, indent=4)
        defaults = [
            f"GIVEN VALUE:\n{indent(value, '    ')}",
            f"OFFENDING RULE: {self.ex.rule!r}",
            f"DEFINITION:\n{indent(schema, '    ')}",
        ]
        return "\n\n".join(optional + defaults)
|  | ||||
|  | ||||
| class _SummaryWriter: | ||||
|     _IGNORE = {"description", "default", "title", "examples"} | ||||
|  | ||||
    def __init__(self, jargon: Optional[Dict[str, str]] = None):
        # Vocabulary substitutions (e.g. JSON "object" -> TOML "table")
        self.jargon: Dict[str, str] = jargon or {}
        # Clarify confusing terms
        self._terms = {
            "anyOf": "at least one of the following",
            "oneOf": "exactly one of the following",
            "allOf": "all of the following",
            "not": "(*NOT* the following)",
            "prefixItems": f"{self._jargon('items')} (in order)",
            "items": "items",
            "contains": "contains at least one of",
            "propertyNames": (
                f"non-predefined acceptable {self._jargon('property names')}"
            ),
            "patternProperties": f"{self._jargon('properties')} named via pattern",
            "const": "predefined value",
            "enum": "one of",
        }
        # Attributes that indicate that the definition is easy and can be done
        # inline (e.g. string and number)
        self._guess_inline_defs = [
            "enum",
            "const",
            "maxLength",
            "minLength",
            "pattern",
            "format",
            "minimum",
            "maximum",
            "exclusiveMinimum",
            "exclusiveMaximum",
            "multipleOf",
        ]
|  | ||||
|     def _jargon(self, term: Union[str, List[str]]) -> Union[str, List[str]]: | ||||
|         if isinstance(term, list): | ||||
|             return [self.jargon.get(t, t) for t in term] | ||||
|         return self.jargon.get(term, term) | ||||
|  | ||||
    def __call__(
        self,
        schema: Union[dict, List[dict]],
        prefix: str = "",
        *,
        _path: Sequence[str] = (),
    ) -> str:
        """Render ``schema`` as an indented, human-readable outline.

        ``prefix`` is the text that should precede the first emitted line;
        ``_path`` tracks the position inside the schema for labelling.
        """
        if isinstance(schema, list):
            return self._handle_list(schema, prefix, _path)

        filtered = self._filter_unecessary(schema, _path)
        simple = self._handle_simple_dict(filtered, _path)
        if simple:
            # Flat/scalar dicts are rendered inline as ``{...}``
            return f"{prefix}{simple}"

        child_prefix = self._child_prefix(prefix, "  ")
        item_prefix = self._child_prefix(prefix, "- ")
        indent = len(prefix) * " "
        with io.StringIO() as buffer:
            for i, (key, value) in enumerate(filtered.items()):
                child_path = [*_path, key]
                line_prefix = prefix if i == 0 else indent
                buffer.write(f"{line_prefix}{self._label(child_path)}:")
                # ^  just the first item should receive the complete prefix
                if isinstance(value, dict):
                    filtered = self._filter_unecessary(value, child_path)
                    simple = self._handle_simple_dict(filtered, child_path)
                    buffer.write(
                        f" {simple}"
                        if simple
                        else f"\n{self(value, child_prefix, _path=child_path)}"
                    )
                elif isinstance(value, list) and (
                    key != "type" or self._is_property(child_path)
                ):
                    children = self._handle_list(value, item_prefix, child_path)
                    sep = " " if children.startswith("[") else "\n"
                    buffer.write(f"{sep}{children}")
                else:
                    buffer.write(f" {self._value(value, child_path)}\n")
            return buffer.getvalue()
|  | ||||
|     def _is_unecessary(self, path: Sequence[str]) -> bool: | ||||
|         if self._is_property(path) or not path:  # empty path => instruction @ root | ||||
|             return False | ||||
|         key = path[-1] | ||||
|         return any(key.startswith(k) for k in "$_") or key in self._IGNORE | ||||
|  | ||||
    def _filter_unecessary(self, schema: dict, path: Sequence[str]):
        """Drop schema entries that would only add noise to the summary."""
        return {
            key: value
            for key, value in schema.items()
            if not self._is_unecessary([*path, key])
        }
|  | ||||
    def _handle_simple_dict(self, value: dict, path: Sequence[str]) -> Optional[str]:
        """Render a flat/scalar dict inline as ``{...}``; ``None`` if not simple."""
        # "Inline-able" if it uses scalar-constraining keywords, or if no
        # value is itself a container.
        inline = any(p in value for p in self._guess_inline_defs)
        simple = not any(isinstance(v, (list, dict)) for v in value.values())
        if inline or simple:
            return f"{{{', '.join(self._inline_attrs(value, path))}}}\n"
        return None
|  | ||||
|     def _handle_list( | ||||
|         self, schemas: list, prefix: str = "", path: Sequence[str] = () | ||||
|     ) -> str: | ||||
|         if self._is_unecessary(path): | ||||
|             return "" | ||||
|  | ||||
|         repr_ = repr(schemas) | ||||
|         if all(not isinstance(e, (dict, list)) for e in schemas) and len(repr_) < 60: | ||||
|             return f"{repr_}\n" | ||||
|  | ||||
|         item_prefix = self._child_prefix(prefix, "- ") | ||||
|         return "".join( | ||||
|             self(v, item_prefix, _path=[*path, f"[{i}]"]) for i, v in enumerate(schemas) | ||||
|         ) | ||||
|  | ||||
|     def _is_property(self, path: Sequence[str]): | ||||
|         """Check if the given path can correspond to an arbitrarily named property""" | ||||
|         counter = 0 | ||||
|         for key in path[-2::-1]: | ||||
|             if key not in {"properties", "patternProperties"}: | ||||
|                 break | ||||
|             counter += 1 | ||||
|  | ||||
|         # If the counter if even, the path correspond to a JSON Schema keyword | ||||
|         # otherwise it can be any arbitrary string naming a property | ||||
|         return counter % 2 == 1 | ||||
|  | ||||
|     def _label(self, path: Sequence[str]) -> str: | ||||
|         *parents, key = path | ||||
|         if not self._is_property(path): | ||||
|             norm_key = _separate_terms(key) | ||||
|             return self._terms.get(key) or " ".join(self._jargon(norm_key)) | ||||
|  | ||||
|         if parents[-1] == "patternProperties": | ||||
|             return f"(regex {key!r})" | ||||
|         return repr(key)  # property name | ||||
|  | ||||
|     def _value(self, value: Any, path: Sequence[str]) -> str: | ||||
|         if path[-1] == "type" and not self._is_property(path): | ||||
|             type_ = self._jargon(value) | ||||
|             return ( | ||||
|                 f"[{', '.join(type_)}]" if isinstance(value, list) else cast(str, type_) | ||||
|             ) | ||||
|         return repr(value) | ||||
|  | ||||
|     def _inline_attrs(self, schema: dict, path: Sequence[str]) -> Iterator[str]: | ||||
|         for key, value in schema.items(): | ||||
|             child_path = [*path, key] | ||||
|             yield f"{self._label(child_path)}: {self._value(value, child_path)}" | ||||
|  | ||||
|     def _child_prefix(self, parent_prefix: str, child_prefix: str) -> str: | ||||
|         return len(parent_prefix) * " " + child_prefix | ||||
|  | ||||
|  | ||||
def _separate_terms(word: str) -> List[str]:
    """Split a camelCase / dashed ``word`` into its lowercase terms.

    >>> _separate_terms("FooBar-foo")
    ['foo', 'bar', 'foo']
    """
    fragments = _CAMEL_CASE_SPLITTER.split(word)
    return [fragment.lower() for fragment in fragments if fragment]
| @@ -0,0 +1,36 @@ | ||||
| """The purpose of this module is implement PEP 621 validations that are | ||||
| difficult to express as a JSON Schema (or that are not supported by the current | ||||
| JSON Schema library). | ||||
| """ | ||||
|  | ||||
| from typing import Mapping, TypeVar | ||||
|  | ||||
| from .error_reporting import ValidationError | ||||
|  | ||||
| T = TypeVar("T", bound=Mapping) | ||||
|  | ||||
|  | ||||
class RedefiningStaticFieldAsDynamic(ValidationError):
    """Raised when a ``[project]`` field is given a static value while also
    being listed in ``project.dynamic``.

    According to PEP 621:

    Build back-ends MUST raise an error if the metadata specifies a field
    statically as well as being listed in dynamic.
    """
|  | ||||
|  | ||||
def validate_project_dynamic(pyproject: T) -> T:
    """Ensure no field has a static value in ``[project]`` while also being
    listed in ``project.dynamic`` (forbidden by PEP 621); raise otherwise.
    """
    project = pyproject.get("project", {})
    dynamic_fields = project.get("dynamic", [])

    for field in dynamic_fields:
        if field not in project:
            continue
        msg = (
            f"You cannot provide a value for `project.{field}` and "
            "list it under `project.dynamic` at the same time"
        )
        offending = {field: project[field], "...": " # ...", "dynamic": dynamic_fields}
        raise RedefiningStaticFieldAsDynamic(
            msg, offending, f"data.project.{field}", rule="PEP 621"
        )

    return pyproject


# Validations applied on top of the JSON Schema based ones
EXTRA_VALIDATIONS = (validate_project_dynamic,)
| @@ -0,0 +1,51 @@ | ||||
| import re | ||||
|  | ||||
|  | ||||
# Pattern used to break a dotted/indexed ``name`` (e.g. ``data.prop[0]``)
# into its path components.
SPLIT_RE = re.compile(r'[\.\[\]]+')


class JsonSchemaException(ValueError):
    """
    Base exception of ``fastjsonschema`` library.
    """


class JsonSchemaValueException(JsonSchemaException):
    """
    Exception raised by validation function. Available properties:

     * ``message`` containing human-readable information what is wrong (e.g. ``data.property[index] must be smaller than or equal to 42``),
     * invalid ``value`` (e.g. ``60``),
     * ``name`` of a path in the data structure (e.g. ``data.property[index]``),
     * ``path`` as an array in the data structure (e.g. ``['data', 'property', 'index']``),
     * the whole ``definition`` which the ``value`` has to fulfil (e.g. ``{'type': 'number', 'maximum': 42}``),
     * ``rule`` which the ``value`` is breaking (e.g. ``maximum``)
     * and ``rule_definition`` (e.g. ``42``).

    .. versionchanged:: 2.14.0
        Added all extra properties.
    """

    def __init__(self, message, value=None, name=None, definition=None, rule=None):
        super().__init__(message)
        self.message = message
        self.value = value
        self.name = name
        self.definition = definition
        self.rule = rule

    @property
    def path(self):
        """``name`` split into components, e.g. ``['data', 'prop', '0']``."""
        components = SPLIT_RE.split(self.name)
        return [component for component in components if component != '']

    @property
    def rule_definition(self):
        """The fragment of ``definition`` selected by ``rule`` (``None`` if absent)."""
        if not self.rule or not self.definition:
            return None
        return self.definition.get(self.rule)


class JsonSchemaDefinitionException(JsonSchemaException):
    """
    Exception raised by generator of validation function.
    """
										
											
												File diff suppressed because one or more lines are too long
											
										
									
								
							| @@ -0,0 +1,259 @@ | ||||
| import logging | ||||
| import os | ||||
| import re | ||||
| import string | ||||
| import typing | ||||
| from itertools import chain as _chain | ||||
|  | ||||
| _logger = logging.getLogger(__name__) | ||||
|  | ||||
| # ------------------------------------------------------------------------------------- | ||||
| # PEP 440 | ||||
|  | ||||
# Version string grammar as described in PEP 440 (verbose regex; the inline
# ``#`` comments are part of the pattern thanks to the ``re.X`` flag below).
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""

# Surrounding whitespace is tolerated; matching is case-insensitive.
VERSION_REGEX = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.X | re.I)


def pep440(version: str) -> bool:
    """Check if ``version`` is a PEP 440-compliant version string."""
    return VERSION_REGEX.match(version) is not None
|  | ||||
|  | ||||
| # ------------------------------------------------------------------------------------- | ||||
| # PEP 508 | ||||
|  | ||||
# Project/extra name rules from PEP 508: starts and ends with an alphanumeric
# character, may contain ``.``, ``_`` and ``-`` in between.
PEP508_IDENTIFIER_PATTERN = r"([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])"
PEP508_IDENTIFIER_REGEX = re.compile(f"^{PEP508_IDENTIFIER_PATTERN}$", re.I)


def pep508_identifier(name: str) -> bool:
    """Check if ``name`` is a valid PEP 508 project/extra identifier."""
    return bool(PEP508_IDENTIFIER_REGEX.match(name))
|  | ||||
|  | ||||
# Full PEP 508 validation requires the ``packaging`` parser; when it cannot be
# imported at all, fall back to a permissive no-op validator.
try:
    try:
        from packaging import requirements as _req
    except ImportError:  # pragma: no cover
        # let's try setuptools vendored version
        from setuptools._vendor.packaging import requirements as _req  # type: ignore

    def pep508(value: str) -> bool:
        """Check if ``value`` is a valid PEP 508 requirement string."""
        try:
            _req.Requirement(value)
            return True
        except _req.InvalidRequirement:
            return False

except ImportError:  # pragma: no cover
    _logger.warning(
        "Could not find an installation of `packaging`. Requirements, dependencies and "
        "versions might not be validated. "
        "To enforce validation, please install `packaging`."
    )

    def pep508(value: str) -> bool:
        """Permissive fallback used when ``packaging`` is unavailable."""
        return True
|  | ||||
|  | ||||
def pep508_versionspec(value: str) -> bool:
    """Expression that can be used to specify/lock versions (including ranges)"""
    forbidden_markers = (";", "]", "@")
    if any(marker in value for marker in forbidden_markers):
        # In PEP 508 terms, conditional markers, extras and URL specs are not
        # part of a version specifier, so their presence disqualifies ``value``.
        return False
    # Attach the spec to a dummy requirement name so the full PEP 508 parser
    # can be re-used for validation:
    return pep508(f"requirement{value}")
|  | ||||
|  | ||||
| # ------------------------------------------------------------------------------------- | ||||
| # PEP 517 | ||||
|  | ||||
|  | ||||
def pep517_backend_reference(value: str) -> bool:
    """Check if ``value`` is a valid PEP 517 backend reference
    (``module.path`` or ``module.path:object.attr``).
    """
    module, _, obj = value.partition(":")
    segments = [*module.split("."), *obj.split(".")]
    # each non-empty dotted segment must be a valid Python identifier
    # (``python_identifier`` boils down to ``str.isidentifier``, inlined here)
    return all(seg.strip().isidentifier() for seg in segments if seg.strip())
|  | ||||
|  | ||||
| # ------------------------------------------------------------------------------------- | ||||
| # Classifiers - PEP 301 | ||||
|  | ||||
|  | ||||
def _download_classifiers() -> str:
    """Download the list of valid trove classifiers from PyPI.

    The response body is decoded using the charset advertised by the server,
    falling back to UTF-8 when none is given.
    """
    import ssl
    from email.message import Message
    from urllib.request import urlopen

    url = "https://pypi.org/pypi?:action=list_classifiers"
    context = ssl.create_default_context()
    with urlopen(url, context=context) as response:
        # ``Message`` is used only to parse the charset parameter out of the
        # Content-Type header.  The key must be spelled "content-type":
        # ``get_param`` looks the parameter up under that header name, so the
        # previous "content_type" key was never found and the charset always
        # silently fell back to UTF-8.
        headers = Message()
        headers["content-type"] = response.getheader("content-type", "text/plain")
        return response.read().decode(headers.get_param("charset", "utf-8"))
|  | ||||
|  | ||||
| class _TroveClassifier: | ||||
|     """The ``trove_classifiers`` package is the official way of validating classifiers, | ||||
|     however this package might not be always available. | ||||
|     As a workaround we can still download a list from PyPI. | ||||
|     We also don't want to be over strict about it, so simply skipping silently is an | ||||
|     option (classifiers will be validated anyway during the upload to PyPI). | ||||
|     """ | ||||
|  | ||||
|     def __init__(self): | ||||
|         self.downloaded: typing.Union[None, False, typing.Set[str]] = None | ||||
|         self._skip_download = False | ||||
|         # None => not cached yet | ||||
|         # False => cache not available | ||||
|         self.__name__ = "trove_classifier"  # Emulate a public function | ||||
|  | ||||
|     def _disable_download(self): | ||||
|         # This is a private API. Only setuptools has the consent of using it. | ||||
|         self._skip_download = True | ||||
|  | ||||
|     def __call__(self, value: str) -> bool: | ||||
|         if self.downloaded is False or self._skip_download is True: | ||||
|             return True | ||||
|  | ||||
|         if os.getenv("NO_NETWORK") or os.getenv("VALIDATE_PYPROJECT_NO_NETWORK"): | ||||
|             self.downloaded = False | ||||
|             msg = ( | ||||
|                 "Install ``trove-classifiers`` to ensure proper validation. " | ||||
|                 "Skipping download of classifiers list from PyPI (NO_NETWORK)." | ||||
|             ) | ||||
|             _logger.debug(msg) | ||||
|             return True | ||||
|  | ||||
|         if self.downloaded is None: | ||||
|             msg = ( | ||||
|                 "Install ``trove-classifiers`` to ensure proper validation. " | ||||
|                 "Meanwhile a list of classifiers will be downloaded from PyPI." | ||||
|             ) | ||||
|             _logger.debug(msg) | ||||
|             try: | ||||
|                 self.downloaded = set(_download_classifiers().splitlines()) | ||||
|             except Exception: | ||||
|                 self.downloaded = False | ||||
|                 _logger.debug("Problem with download, skipping validation") | ||||
|                 return True | ||||
|  | ||||
|         return value in self.downloaded or value.lower().startswith("private ::") | ||||
|  | ||||
|  | ||||
# Prefer the official ``trove_classifiers`` package when it is installed;
# otherwise fall back to the (network-based) ``_TroveClassifier`` shim above.
try:
    from trove_classifiers import classifiers as _trove_classifiers

    def trove_classifier(value: str) -> bool:
        """Check if ``value`` is in the official classifier list (or "Private ::")."""
        return value in _trove_classifiers or value.lower().startswith("private ::")

except ImportError:  # pragma: no cover
    trove_classifier = _TroveClassifier()
|  | ||||
|  | ||||
| # ------------------------------------------------------------------------------------- | ||||
| # Non-PEP related | ||||
|  | ||||
|  | ||||
def url(value: str) -> bool:
    """Best-effort check that ``value`` looks like a URL (scheme + netloc)."""
    from urllib.parse import urlparse

    try:
        parsed = urlparse(value)
        if not parsed.scheme:
            _logger.warning(
                "For maximum compatibility please make sure to include a "
                "`scheme` prefix in your URL (e.g. 'http://'). "
                f"Given value: {value}"
            )
            # Retry with an assumed scheme, unless the value looks like a
            # filesystem path or contains a user@host part.
            looks_like_path = value.startswith("/") or value.startswith("\\")
            if not looks_like_path and "@" not in value:
                parsed = urlparse(f"http://{value}")

        return bool(parsed.scheme and parsed.netloc)
    except Exception:
        return False
|  | ||||
|  | ||||
# https://packaging.python.org/specifications/entry-points/
# Accepted entry-point names: no "=", no leading "[", no leading/trailing
# whitespace.
ENTRYPOINT_PATTERN = r"[^\[\s=]([^=]*[^\s=])?"
ENTRYPOINT_REGEX = re.compile(f"^{ENTRYPOINT_PATTERN}$", re.I)
# Stricter "recommended" form: letters, digits, ``_``, ``.`` and ``-`` only.
RECOMMEDED_ENTRYPOINT_PATTERN = r"[\w.-]+"
RECOMMEDED_ENTRYPOINT_REGEX = re.compile(f"^{RECOMMEDED_ENTRYPOINT_PATTERN}$", re.I)
# Entry-point groups follow a dotted-name structure (e.g. ``console_scripts``).
ENTRYPOINT_GROUP_PATTERN = r"\w+(\.\w+)*"
ENTRYPOINT_GROUP_REGEX = re.compile(f"^{ENTRYPOINT_GROUP_PATTERN}$", re.I)
|  | ||||
|  | ||||
def python_identifier(value: str) -> bool:
    """Check if ``value`` is a valid Python identifier (``str.isidentifier``)."""
    return value.isidentifier()
|  | ||||
|  | ||||
def python_qualified_identifier(value: str) -> bool:
    """Check for a dotted chain of valid Python identifiers (e.g. ``pkg.mod``)."""
    if value.startswith(".") or value.endswith("."):
        return False
    # every dot-separated segment must itself be a valid identifier
    # (``python_identifier`` is just ``str.isidentifier``, inlined here)
    return all(segment.isidentifier() for segment in value.split("."))
|  | ||||
|  | ||||
def python_module_name(value: str) -> bool:
    """Check if ``value`` is a valid name for an importable Python module
    (alias of :func:`python_qualified_identifier`).
    """
    return python_qualified_identifier(value)
|  | ||||
|  | ||||
def python_entrypoint_group(value: str) -> bool:
    """Check if ``value`` is a valid entry-point group name (dotted words)."""
    return bool(ENTRYPOINT_GROUP_REGEX.match(value))
|  | ||||
|  | ||||
def python_entrypoint_name(value: str) -> bool:
    """Check if ``value`` is an acceptable entry-point name.

    Names that are valid but do not follow the recommended character set are
    accepted with a warning instead of being rejected.
    """
    if not ENTRYPOINT_REGEX.match(value):
        return False
    if not RECOMMEDED_ENTRYPOINT_REGEX.match(value):
        # valid, but discouraged: warn instead of failing validation
        msg = f"Entry point `{value}` does not follow recommended pattern: "
        msg += RECOMMEDED_ENTRYPOINT_PATTERN
        _logger.warning(msg)
    return True
|  | ||||
|  | ||||
def python_entrypoint_reference(value: str) -> bool:
    """Check if ``value`` is a valid entry-point reference
    (``module.path:object.attr [extra1, extra2]``).
    """
    module, _, rest = value.partition(":")
    if "[" in rest:
        obj, _, extras_ = rest.partition("[")
        if not extras_.strip().endswith("]"):
            # also covers ``rest`` ending in a bare "[" (``extras_`` empty),
            # which previously crashed with IndexError via ``[-1]``
            return False
        extras = (x.strip() for x in extras_.strip(string.whitespace + "[]").split(","))
        if not all(pep508_identifier(e) for e in extras):
            return False
        _logger.warning(f"`{value}` - using extras for entry points is not recommended")
    else:
        obj = rest

    module_parts = module.split(".")
    # the object part is optional: only validate it when a ":" was present
    identifiers = _chain(module_parts, obj.split(".")) if rest else module_parts
    return all(python_identifier(i.strip()) for i in identifiers)
							
								
								
									
										462
									
								
								venv/lib/python3.11/site-packages/setuptools/config/expand.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										462
									
								
								venv/lib/python3.11/site-packages/setuptools/config/expand.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,462 @@ | ||||
| """Utility functions to expand configuration directives or special values | ||||
| (such glob patterns). | ||||
|  | ||||
| We can split the process of interpreting configuration files into 2 steps: | ||||
|  | ||||
1. Parsing the file contents from strings to value objects
   that can be understood by Python (for example a string with a comma
   separated list of keywords into an actual Python list of strings).
|  | ||||
| 2. The expansion (or post-processing) of these values according to the | ||||
|    semantics ``setuptools`` assign to them (for example a configuration field | ||||
|    with the ``file:`` directive should be expanded from a list of file paths to | ||||
|    a single string with the contents of those files concatenated) | ||||
|  | ||||
This module focuses on the second step, and therefore allows sharing the expansion
functions among several configuration file formats.
|  | ||||
| **PRIVATE MODULE**: API reserved for setuptools internal usage only. | ||||
| """ | ||||
| import ast | ||||
| import importlib | ||||
| import io | ||||
| import os | ||||
| import pathlib | ||||
| import sys | ||||
| import warnings | ||||
| from glob import iglob | ||||
| from configparser import ConfigParser | ||||
| from importlib.machinery import ModuleSpec | ||||
| from itertools import chain | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
|     Callable, | ||||
|     Dict, | ||||
|     Iterable, | ||||
|     Iterator, | ||||
|     List, | ||||
|     Mapping, | ||||
|     Optional, | ||||
|     Tuple, | ||||
|     TypeVar, | ||||
|     Union, | ||||
|     cast | ||||
| ) | ||||
| from pathlib import Path | ||||
| from types import ModuleType | ||||
|  | ||||
| from distutils.errors import DistutilsOptionError | ||||
|  | ||||
| from .._path import same_path as _same_path | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from setuptools.dist import Distribution  # noqa | ||||
|     from setuptools.discovery import ConfigDiscovery  # noqa | ||||
|     from distutils.dist import DistributionMetadata  # noqa | ||||
|  | ||||
| chain_iter = chain.from_iterable | ||||
| _Path = Union[str, os.PathLike] | ||||
| _K = TypeVar("_K") | ||||
| _V = TypeVar("_V", covariant=True) | ||||
|  | ||||
|  | ||||
class StaticModule:
    """Proxy to a module object that avoids executing arbitrary code."""

    def __init__(self, name: str, spec: ModuleSpec):
        # Parse the source into an AST instead of importing it, so no
        # module-level code is ever executed.
        self.name = name
        self.spec = spec
        self.module = ast.parse(pathlib.Path(spec.origin).read_bytes())

    def _find_assignments(self) -> Iterator[Tuple[ast.AST, ast.AST]]:
        """Yield ``(target, value)`` pairs for every top-level assignment."""
        for node in self.module.body:
            if isinstance(node, ast.Assign):
                for target in node.targets:
                    yield target, node.value
            elif isinstance(node, ast.AnnAssign) and node.value:
                yield node.target, node.value

    def __getattr__(self, attr):
        """Attempt to load an attribute "statically", via :func:`ast.literal_eval`."""
        try:
            candidates = (
                value
                for target, value in self._find_assignments()
                if isinstance(target, ast.Name) and target.id == attr
            )
            return ast.literal_eval(next(candidates))
        except Exception as e:
            raise AttributeError(f"{self.name} has no attribute {attr}") from e
|  | ||||
|  | ||||
def glob_relative(
    patterns: Iterable[str], root_dir: Optional[_Path] = None
) -> List[str]:
    """Expand the list of glob patterns, but preserving relative paths.

    :param list[str] patterns: List of glob patterns
    :param str root_dir: Path to which globs should be relative
                         (current directory by default)
    :rtype: list
    """
    magic_chars = {'*', '?', '[', ']', '{', '}'}
    base = root_dir or os.getcwd()
    expanded: List[str] = []

    for pattern in patterns:
        if not any(char in pattern for char in magic_chars):
            # plain path: take the value as-is (relative to ``base``)
            expanded.append(os.path.relpath(pattern, base).replace(os.sep, "/"))
            continue

        # glob pattern: expand it while keeping the resulting paths *relative*
        absolute_pattern = os.path.abspath(os.path.join(base, pattern))
        expanded.extend(sorted(
            os.path.relpath(match, base).replace(os.sep, "/")
            for match in iglob(absolute_pattern, recursive=True)
        ))

    return expanded
|  | ||||
|  | ||||
def read_files(filepaths: Union[str, bytes, Iterable[_Path]], root_dir=None) -> str:
    """Return the content of the files concatenated using ``\n`` as str

    This function is sandboxed and won't reach anything outside ``root_dir``

    (By default ``root_dir`` is the current directory).
    """
    from setuptools.extern.more_itertools import always_iterable

    base = os.path.abspath(root_dir or os.getcwd())
    candidates = (os.path.join(base, path) for path in always_iterable(filepaths))
    existing = _filter_existing_files(candidates)
    # _assert_local raises (rather than returning False) for paths outside base
    return '\n'.join(
        _read_file(path) for path in existing if _assert_local(path, base)
    )
|  | ||||
|  | ||||
def _filter_existing_files(filepaths: Iterable[_Path]) -> Iterator[_Path]:
    """Yield only the paths that point to existing files, warning on the rest."""
    for candidate in filepaths:
        if not os.path.isfile(candidate):
            warnings.warn(f"File {candidate!r} cannot be found")
            continue
        yield candidate
|  | ||||
|  | ||||
def _read_file(filepath: Union[bytes, _Path]) -> str:
    """Read the whole file as UTF-8 text."""
    with io.open(filepath, encoding='utf-8') as stream:
        return stream.read()
|  | ||||
|  | ||||
def _assert_local(filepath: _Path, root_dir: str):
    """Raise ``DistutilsOptionError`` unless ``filepath`` lives under ``root_dir``."""
    parents = Path(os.path.abspath(filepath)).parents
    if Path(os.path.abspath(root_dir)) not in parents:
        raise DistutilsOptionError(
            f"Cannot access {filepath!r} (or anything outside {root_dir!r})"
        )

    return True
|  | ||||
|  | ||||
def read_attr(
    attr_desc: str,
    package_dir: Optional[Mapping[str, str]] = None,
    root_dir: Optional[_Path] = None
):
    """Reads the value of an attribute from a module.

    This function will try to read the attributed statically first
    (via :func:`ast.literal_eval`), and only evaluate the module if it fails.

    Examples:
        read_attr("package.attr")
        read_attr("package.module.attr")

    :param str attr_desc: Dot-separated string describing how to reach the
        attribute (see examples above)
    :param dict[str, str] package_dir: Mapping of package names to their
        location in disk (represented by paths relative to ``root_dir``).
    :param str root_dir: Path to directory containing all the packages in
        ``package_dir`` (current directory by default).
    :rtype: str
    """
    base = root_dir or os.getcwd()
    *module_parts, attr_name = attr_desc.strip().split('.')
    module_name = '.'.join(module_parts) or '__init__'
    _parent, module_path, module_name = _find_module(module_name, package_dir, base)
    spec = _find_spec(module_name, module_path)

    try:
        # cheap path: read the value straight from the AST, without importing
        return getattr(StaticModule(module_name, spec), attr_name)
    except Exception:
        # fallback: actually execute the module and read the attribute
        return getattr(_load_spec(spec, module_name), attr_name)
|  | ||||
|  | ||||
def _find_spec(module_name: str, module_path: Optional[_Path]) -> ModuleSpec:
    """Build a :class:`ModuleSpec`, preferring ``module_path`` and falling back
    to the regular import machinery; raise ``ModuleNotFoundError`` otherwise.
    """
    spec = importlib.util.spec_from_file_location(module_name, module_path)
    if not spec:
        spec = importlib.util.find_spec(module_name)
    if spec is None:
        raise ModuleNotFoundError(module_name)
    return spec
|  | ||||
|  | ||||
| def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType: | ||||
|     name = getattr(spec, "__name__", module_name) | ||||
|     if name in sys.modules: | ||||
|         return sys.modules[name] | ||||
|     module = importlib.util.module_from_spec(spec) | ||||
|     sys.modules[name] = module  # cache (it also ensures `==` works on loaded items) | ||||
|     spec.loader.exec_module(module)  # type: ignore | ||||
|     return module | ||||
|  | ||||
|  | ||||
def _find_module(
    module_name: str, package_dir: Optional[Mapping[str, str]], root_dir: _Path
) -> Tuple[_Path, Optional[str], str]:
    """Given a module (that could normally be imported by ``module_name``
    after the build is complete), find the path to the parent directory where
    it is contained and the canonical name that could be used to import it
    considering the ``package_dir`` in the build configuration and ``root_dir``
    """
    parent_path = root_dir
    module_parts = module_name.split('.')
    if package_dir:
        if module_parts[0] in package_dir:
            # A custom path was specified for the module we want to import
            custom_path = package_dir[module_parts[0]]
            parts = custom_path.rsplit('/', 1)
            if len(parts) > 1:
                # the custom path itself contains a directory part
                # (e.g. "src/pkg" => parent dir "src", top module "pkg")
                parent_path = os.path.join(root_dir, parts[0])
                parent_module = parts[1]
            else:
                parent_module = custom_path
            module_name = ".".join([parent_module, *module_parts[1:]])
        elif '' in package_dir:
            # A custom parent directory was specified for all root modules
            parent_path = os.path.join(root_dir, package_dir[''])

    # Try the module as a plain ``.py`` file, a package ``__init__.py``,
    # or any other file extension matching ``<module>.*``
    path_start = os.path.join(parent_path, *module_name.split("."))
    candidates = chain(
        (f"{path_start}.py", os.path.join(path_start, "__init__.py")),
        iglob(f"{path_start}.*")
    )
    module_path = next((x for x in candidates if os.path.isfile(x)), None)
    return parent_path, module_path, module_name
|  | ||||
|  | ||||
def resolve_class(
    qualified_class_name: str,
    package_dir: Optional[Mapping[str, str]] = None,
    root_dir: Optional[_Path] = None
) -> Callable:
    """Given a qualified class name, return the associated class object"""
    base = root_dir or os.getcwd()
    cut = qualified_class_name.rfind('.')
    class_name = qualified_class_name[cut + 1 :]
    pkg_name = qualified_class_name[:cut]

    _parent, module_path, module_name = _find_module(pkg_name, package_dir, base)
    spec = _find_spec(module_name, module_path)
    return getattr(_load_spec(spec, module_name), class_name)
|  | ||||
|  | ||||
def cmdclass(
    values: Dict[str, str],
    package_dir: Optional[Mapping[str, str]] = None,
    root_dir: Optional[_Path] = None
) -> Dict[str, Callable]:
    """Given a dictionary mapping command names to strings for qualified class
    names, apply :func:`resolve_class` to the dict values.
    """
    resolved: Dict[str, Callable] = {}
    for command, qualified_name in values.items():
        resolved[command] = resolve_class(qualified_name, package_dir, root_dir)
    return resolved
|  | ||||
|  | ||||
def find_packages(
    *,
    namespaces=True,
    fill_package_dir: Optional[Dict[str, str]] = None,
    root_dir: Optional[_Path] = None,
    **kwargs
) -> List[str]:
    """Works similarly to :func:`setuptools.find_packages`, but with all
    arguments given as keyword arguments. Moreover, ``where`` can be given
    as a list (the results will be simply concatenated).

    When the additional keyword argument ``namespaces`` is ``True``, it will
    behave like :func:`setuptools.find_namespace_packages` (i.e. include
    implicit namespaces as per :pep:`420`).

    The ``where`` argument will be considered relative to ``root_dir`` (or the current
    working directory when ``root_dir`` is not given).

    If the ``fill_package_dir`` argument is passed, this function will consider it as a
    similar data structure to the ``package_dir`` configuration parameter and fill in
    any missing package location.

    :rtype: list
    """
    from setuptools.discovery import construct_package_dir
    from setuptools.extern.more_itertools import unique_everseen, always_iterable

    if namespaces:
        from setuptools.discovery import PEP420PackageFinder as PackageFinder
    else:
        from setuptools.discovery import PackageFinder  # type: ignore

    root_dir = root_dir or os.curdir
    where = kwargs.pop('where', ['.'])
    packages: List[str] = []
    # ``fill_package_dir`` is mutated in place, so the caller sees the updates
    fill_package_dir = {} if fill_package_dir is None else fill_package_dir
    search = list(unique_everseen(always_iterable(where)))

    # A single search dir (other than "." or the root) acts as the implicit
    # parent directory for all root packages
    if len(search) == 1 and all(not _same_path(search[0], x) for x in (".", root_dir)):
        fill_package_dir.setdefault("", search[0])

    for path in search:
        package_path = _nest_path(root_dir, path)
        pkgs = PackageFinder.find(package_path, **kwargs)
        packages.extend(pkgs)
        # record where each discovered package lives, unless already implied
        if pkgs and not (
            fill_package_dir.get("") == path
            or os.path.samefile(package_path, root_dir)
        ):
            fill_package_dir.update(construct_package_dir(pkgs, path))

    return packages
|  | ||||
|  | ||||
| def _nest_path(parent: _Path, path: _Path) -> str: | ||||
|     path = parent if path in {".", ""} else os.path.join(parent, path) | ||||
|     return os.path.normpath(path) | ||||
|  | ||||
|  | ||||
def version(value: Union[Callable, Iterable[Union[str, int]], str]) -> str:
    """Normalise a version obtained from an attribute into a string.

    ``value`` may be the version string itself, an iterable of segments
    (e.g. ``(1, 2, 3)`` becomes ``"1.2.3"``), or a zero-argument callable
    returning either of those.
    """
    if callable(value):
        value = value()  # resolve lazily-provided versions first

    value = cast(Iterable[Union[str, int]], value)

    if isinstance(value, str):
        return value
    if hasattr(value, '__iter__'):
        # e.g. a ``(major, minor, patch)`` tuple or list of segments
        return '.'.join(map(str, value))
    return str(value)
|  | ||||
|  | ||||
def canonic_package_data(package_data: dict) -> dict:
    """Normalise ``package_data``: move the ``"*"`` (wildcard) key to the
    ``""`` key expected by setuptools. The dict is modified in place
    and also returned.
    """
    _missing = object()  # sentinel: distinguishes "absent" from a None value
    wildcard = package_data.pop("*", _missing)
    if wildcard is not _missing:
        package_data[""] = wildcard
    return package_data
|  | ||||
|  | ||||
def canonic_data_files(
    data_files: Union[list, dict], root_dir: Optional[_Path] = None
) -> List[Tuple[str, List[str]]]:
    """Normalise ``data_files`` into the list-of-pairs form used by
    ``setup.py`` (expanding glob patterns when a dict is given).
    """
    if isinstance(data_files, list):
        # Already in ``setup.py`` shape; returned untouched.
        return data_files

    pairs = []
    for dest, patterns in data_files.items():
        pairs.append((dest, glob_relative(patterns, root_dir)))
    return pairs
|  | ||||
|  | ||||
def entry_points(text: str, text_source="entry-points") -> Dict[str, dict]:
    """Parse ``entry_points.txt``-style contents into a two-level dict:
    ``{group: {entry_point_name: object_reference}}``.

    :param text: contents of the entry-points file
    :param text_source: label used by the parser in error messages
    """
    # ``default_section=None`` disables [DEFAULT] inheritance, so every
    # section is an independent entry-point group.
    parser = ConfigParser(default_section=None, delimiters=("=",))  # type: ignore
    parser.optionxform = str  # entry-point names are case sensitive
    parser.read_string(text, text_source)
    groups: Dict[str, dict] = {}
    for section, options in parser.items():
        if section == parser.default_section:
            continue  # skip the (empty) default section the parser injects
        groups[section] = dict(options.items())
    return groups
|  | ||||
|  | ||||
class EnsurePackagesDiscovered:
    """Context manager ensuring package auto-discovery runs (at most once)
    before expand functions that need the package list, e.g. :func:`read_attr`,
    :func:`resolve_class`, :func:`cmdclass`.

    Discovery is postponed as much as possible because configuration values
    processed later (e.g. ``package_dir``) can influence its result.
    """

    def __init__(self, distribution: "Distribution"):
        self._dist = distribution
        self._called = False  # guards against triggering discovery twice

    def __call__(self):
        """Trigger the automatic package discovery, if it is still necessary."""
        if self._called:
            return
        self._called = True
        # Skip ``name``: the configuration may still be half-parsed.
        self._dist.set_defaults(name=False)

    def __enter__(self):
        return self

    def __exit__(self, _exc_type, _exc_value, _traceback):
        # Only derive a default name when discovery actually ran.
        if self._called:
            self._dist.set_defaults.analyse_name()

    def _get_package_dir(self) -> Mapping[str, str]:
        self()  # make sure discovery happened before reading ``package_dir``
        pkg_dir = self._dist.package_dir
        return pkg_dir if pkg_dir is not None else {}

    @property
    def package_dir(self) -> Mapping[str, str]:
        """Proxy to ``package_dir`` that may trigger auto-discovery when used."""
        return LazyMappingProxy(self._get_package_dir)
|  | ||||
|  | ||||
class LazyMappingProxy(Mapping[_K, _V]):
    """Mapping proxy that delays resolving the target object, until really needed.

    >>> def obtain_mapping():
    ...     print("Running expensive function!")
    ...     return {"key": "value", "other key": "other value"}
    >>> mapping = LazyMappingProxy(obtain_mapping)
    >>> mapping["key"]
    Running expensive function!
    'value'
    >>> mapping["other key"]
    'other value'
    """

    def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V]]):
        self._obtain = obtain_mapping_value
        self._value: Optional[Mapping[_K, _V]] = None  # memoised target mapping

    def _materialize(self) -> Mapping[_K, _V]:
        # Resolve (and cache) the underlying mapping on first use.
        target = self._value
        if target is None:
            target = self._value = self._obtain()
        return target

    def __getitem__(self, key: _K) -> _V:
        return self._materialize()[key]

    def __len__(self) -> int:
        return len(self._materialize())

    def __iter__(self) -> Iterator[_K]:
        return iter(self._materialize())
| @@ -0,0 +1,498 @@ | ||||
| """ | ||||
| Load setuptools configuration from ``pyproject.toml`` files. | ||||
|  | ||||
| **PRIVATE MODULE**: API reserved for setuptools internal usage only. | ||||
| """ | ||||
| import logging | ||||
| import os | ||||
| import warnings | ||||
| from contextlib import contextmanager | ||||
| from functools import partial | ||||
| from typing import TYPE_CHECKING, Callable, Dict, Optional, Mapping, Set, Union | ||||
|  | ||||
| from setuptools.errors import FileError, OptionError | ||||
|  | ||||
| from . import expand as _expand | ||||
| from ._apply_pyprojecttoml import apply as _apply | ||||
| from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _WouldIgnoreField | ||||
|  | ||||
if TYPE_CHECKING:
    # Imported for type annotations only (avoids a runtime import cycle).
    from setuptools.dist import Distribution  # noqa

_Path = Union[str, os.PathLike]  # anything accepted by ``os.path`` APIs
_logger = logging.getLogger(__name__)
|  | ||||
|  | ||||
def load_file(filepath: _Path) -> dict:
    """Read ``filepath`` and parse it as TOML, returning the content as a dict."""
    from setuptools.extern import tomli  # type: ignore

    with open(filepath, "rb") as fh:  # tomli requires a binary file object
        return tomli.load(fh)
|  | ||||
|  | ||||
def validate(config: dict, filepath: _Path) -> bool:
    """Validate ``config`` (tables parsed from ``pyproject.toml``) against the
    bundled ``validate-pyproject`` schemas.

    :param dict config: (sub)set of the ``pyproject.toml`` tables to validate
    :param filepath: path of the original file
        (NOTE(review): currently unused in this function body — presumably kept
        for API symmetry/error reporting; confirm before removing)
    :return: ``True`` when the configuration is valid
    :raises ValueError: when validation fails
    """
    from . import _validate_pyproject as validator

    trove_classifier = validator.FORMAT_FUNCTIONS.get("trove-classifier")
    if hasattr(trove_classifier, "_disable_download"):
        # Improve reproducibility by default. See issue 31 for validate-pyproject.
        trove_classifier._disable_download()  # type: ignore

    try:
        return validator.validate(config)
    except validator.ValidationError as ex:
        summary = f"configuration error: {ex.summary}"
        if ex.name.strip("`") != "project":
            # Probably it is just a field missing/misnamed, not worthy the verbosity...
            _logger.debug(summary)
            _logger.debug(ex.details)

        # Re-raise as a plain ValueError with a condensed message;
        # ``from None`` hides the (noisy) original traceback.
        error = f"invalid pyproject.toml config: {ex.name}."
        raise ValueError(f"{error}\n{summary}") from None
|  | ||||
|  | ||||
def apply_configuration(
    dist: "Distribution",
    filepath: _Path,
    ignore_option_errors=False,
) -> "Distribution":
    """Apply the configuration from a ``pyproject.toml`` file into an existing
    distribution object.

    :param dist: distribution object to be updated
    :param filepath: path to the ``pyproject.toml`` file
    :param bool ignore_option_errors: see :func:`read_configuration`
    :return: the updated distribution object
    """
    # ``expand=True``: directives (``file:``/``attr:``/...) are resolved here.
    config = read_configuration(filepath, True, ignore_option_errors, dist)
    return _apply(dist, config, filepath)
|  | ||||
|  | ||||
def read_configuration(
    filepath: _Path,
    expand=True,
    ignore_option_errors=False,
    dist: Optional["Distribution"] = None,
):
    """Read given configuration file and returns options from it as a dict.

    :param str|unicode filepath: Path to configuration file in the ``pyproject.toml``
        format.

    :param bool expand: Whether to expand directives and other computed values
        (i.e. post-process the given configuration)

    :param bool ignore_option_errors: Whether to silently ignore
        options, values of which could not be resolved (e.g. due to exceptions
        in directives such as file:, attr:, etc.).
        If False exceptions are propagated as expected.

    :param Distribution|None dist: Distribution object to which the configuration
        refers. If not given a dummy object will be created and discarded after
        the configuration is read. This is used for auto-discovery of packages
        and in the case a dynamic configuration (e.g. ``attr`` or ``cmdclass``)
        is expanded. When ``expand=False`` this object is simply ignored.

    :rtype: dict
    """
    filepath = os.path.abspath(filepath)

    if not os.path.isfile(filepath):
        raise FileError(f"Configuration file {filepath!r} does not exist.")

    asdict = load_file(filepath) or {}
    project_table = asdict.get("project", {})
    tool_table = asdict.get("tool", {})
    setuptools_table = tool_table.get("setuptools", {})
    if not asdict or not (project_table or setuptools_table):
        return {}  # User is not using pyproject to configure setuptools

    if setuptools_table:
        # TODO: Remove the following once the feature stabilizes:
        msg = "Support for `[tool.setuptools]` in `pyproject.toml` is still *beta*."
        warnings.warn(msg, _BetaConfiguration)

    # There is an overall sense in the community that making include_package_data=True
    # the default would be an improvement.
    # `ini2toml` backfills include_package_data=False when nothing is explicitly given,
    # therefore setting a default here is backwards compatible.
    orig_setuptools_table = setuptools_table.copy()
    if dist and getattr(dist, "include_package_data", None) is not None:
        # Prefer a value already set on the distribution (e.g. via setup.py).
        setuptools_table.setdefault("include-package-data", dist.include_package_data)
    else:
        setuptools_table.setdefault("include-package-data", True)
    # Persist changes (the tables may have been created above as fresh dicts):
    asdict["tool"] = tool_table
    tool_table["setuptools"] = setuptools_table

    try:
        # Don't complain about unrelated errors (e.g. tools not using the "tool" table)
        subset = {"project": project_table, "tool": {"setuptools": setuptools_table}}
        validate(subset, filepath)
    except Exception as ex:
        # TODO: Remove the following once the feature stabilizes:
        if _skip_bad_config(project_table, orig_setuptools_table, dist):
            return {}
        # TODO: After the previous statement is removed the try/except can be replaced
        # by the _ignore_errors context manager.
        if ignore_option_errors:
            _logger.debug(f"ignored error: {ex.__class__.__name__} - {ex}")
        else:
            raise  # re-raise exception

    if expand:
        # Directives are resolved relative to the directory of the TOML file.
        root_dir = os.path.dirname(filepath)
        return expand_configuration(asdict, root_dir, ignore_option_errors, dist)

    return asdict
|  | ||||
|  | ||||
| def _skip_bad_config( | ||||
|     project_cfg: dict, setuptools_cfg: dict, dist: Optional["Distribution"] | ||||
| ) -> bool: | ||||
|     """Be temporarily forgiving with invalid ``pyproject.toml``""" | ||||
|     # See pypa/setuptools#3199 and pypa/cibuildwheel#1064 | ||||
|  | ||||
|     if dist is None or ( | ||||
|         dist.metadata.name is None | ||||
|         and dist.metadata.version is None | ||||
|         and dist.install_requires is None | ||||
|     ): | ||||
|         # It seems that the build is not getting any configuration from other places | ||||
|         return False | ||||
|  | ||||
|     if setuptools_cfg: | ||||
|         # If `[tool.setuptools]` is set, then `pyproject.toml` config is intentional | ||||
|         return False | ||||
|  | ||||
|     given_config = set(project_cfg.keys()) | ||||
|     popular_subset = {"name", "version", "python_requires", "requires-python"} | ||||
|     if given_config <= popular_subset: | ||||
|         # It seems that the docs in cibuildtool has been inadvertently encouraging users | ||||
|         # to create `pyproject.toml` files that are not compliant with the standards. | ||||
|         # Let's be forgiving for the time being. | ||||
|         warnings.warn(_InvalidFile.message(), _InvalidFile, stacklevel=2) | ||||
|         return True | ||||
|  | ||||
|     return False | ||||
|  | ||||
|  | ||||
def expand_configuration(
    config: dict,
    root_dir: Optional[_Path] = None,
    ignore_option_errors: bool = False,
    dist: Optional["Distribution"] = None,
) -> dict:
    """Given a configuration with unresolved fields (e.g. dynamic, cmdclass, ...)
    find their final values.

    :param dict config: Dict containing the configuration for the distribution
    :param str root_dir: Top-level directory for the distribution/project
        (the same directory where ``pyproject.toml`` is placed)
    :param bool ignore_option_errors: see :func:`read_configuration`
    :param Distribution|None dist: Distribution object to which the configuration
        refers. If not given a dummy object will be created and discarded after
        the configuration is read. Used in the case a dynamic configuration
        (e.g. ``attr`` or ``cmdclass``).

    :rtype: dict
    """
    expander = _ConfigExpander(config, root_dir, ignore_option_errors, dist)
    return expander.expand()
|  | ||||
|  | ||||
class _ConfigExpander:
    """Expand the directives (``file:``/``attr:``) and dynamic fields of an
    already-parsed ``pyproject.toml`` dict into their final, concrete values,
    modifying the dict in place.
    """

    def __init__(
        self,
        config: dict,
        root_dir: Optional[_Path] = None,
        ignore_option_errors: bool = False,
        dist: Optional["Distribution"] = None,
    ):
        self.config = config
        # Directives are resolved relative to this directory.
        self.root_dir = root_dir or os.getcwd()
        self.project_cfg = config.get("project", {})
        # Fields declared dynamic in ``project.dynamic`` (PEP 621).
        self.dynamic = self.project_cfg.get("dynamic", [])
        self.setuptools_cfg = config.get("tool", {}).get("setuptools", {})
        # ``tool.setuptools.dynamic``: directives telling how to obtain them.
        self.dynamic_cfg = self.setuptools_cfg.get("dynamic", {})
        self.ignore_option_errors = ignore_option_errors
        self._dist = dist
        # Files read via ``file:`` directives, tracked for the final dist.
        self._referenced_files: Set[str] = set()

    def _ensure_dist(self) -> "Distribution":
        """Return the given distribution, or a throw-away one for discovery."""
        from setuptools.dist import Distribution

        attrs = {"src_root": self.root_dir, "name": self.project_cfg.get("name", None)}
        return self._dist or Distribution(attrs)

    def _process_field(self, container: dict, field: str, fn: Callable):
        """If ``field`` exists in ``container``, replace its value with
        ``fn(value)`` (errors optionally swallowed)."""
        if field in container:
            with _ignore_errors(self.ignore_option_errors):
                container[field] = fn(container[field])

    def _canonic_package_data(self, field="package-data"):
        # Normalises the "*" key into the "" key expected by setuptools.
        package_data = self.setuptools_cfg.get(field, {})
        return _expand.canonic_package_data(package_data)

    def expand(self):
        """Expand every supported field in place and return ``self.config``."""
        self._expand_packages()
        self._canonic_package_data()
        self._canonic_package_data("exclude-package-data")

        # A distribution object is required for discovering the correct package_dir
        dist = self._ensure_dist()
        ctx = _EnsurePackagesDiscovered(dist, self.project_cfg, self.setuptools_cfg)
        with ctx as ensure_discovered:
            package_dir = ensure_discovered.package_dir
            self._expand_data_files()
            self._expand_cmdclass(package_dir)
            self._expand_all_dynamic(dist, package_dir)

        dist._referenced_files.update(self._referenced_files)
        return self.config

    def _expand_packages(self):
        """Expand the ``packages.find`` directive into an explicit list."""
        packages = self.setuptools_cfg.get("packages")
        if packages is None or isinstance(packages, (list, tuple)):
            return  # already an explicit list (or not given at all)

        find = packages.get("find")
        if isinstance(find, dict):
            find["root_dir"] = self.root_dir
            # find_packages fills missing package locations into ``package-dir``
            find["fill_package_dir"] = self.setuptools_cfg.setdefault("package-dir", {})
            with _ignore_errors(self.ignore_option_errors):
                self.setuptools_cfg["packages"] = _expand.find_packages(**find)

    def _expand_data_files(self):
        """Normalise ``data-files`` (dict => list of pairs, globs expanded)."""
        data_files = partial(_expand.canonic_data_files, root_dir=self.root_dir)
        self._process_field(self.setuptools_cfg, "data-files", data_files)

    def _expand_cmdclass(self, package_dir: Mapping[str, str]):
        """Resolve ``cmdclass`` qualified names into actual class objects."""
        root_dir = self.root_dir
        cmdclass = partial(_expand.cmdclass, package_dir=package_dir, root_dir=root_dir)
        self._process_field(self.setuptools_cfg, "cmdclass", cmdclass)

    def _expand_all_dynamic(self, dist: "Distribution", package_dir: Mapping[str, str]):
        """Resolve all ``project.dynamic`` fields and store the results back
        into ``self.project_cfg``."""
        special = (  # need special handling
            "version",
            "readme",
            "entry-points",
            "scripts",
            "gui-scripts",
            "classifiers",
            "dependencies",
            "optional-dependencies",
        )
        # `_obtain` functions are assumed to raise appropriate exceptions/warnings.
        obtained_dynamic = {
            field: self._obtain(dist, field, package_dir)
            for field in self.dynamic
            if field not in special
        }
        obtained_dynamic.update(
            self._obtain_entry_points(dist, package_dir) or {},
            version=self._obtain_version(dist, package_dir),
            readme=self._obtain_readme(dist),
            classifiers=self._obtain_classifiers(dist),
            dependencies=self._obtain_dependencies(dist),
            optional_dependencies=self._obtain_optional_dependencies(dist),
        )
        # `None` indicates there is nothing in `tool.setuptools.dynamic` but the value
        # might have already been set by setup.py/extensions, so avoid overwriting.
        updates = {k: v for k, v in obtained_dynamic.items() if v is not None}
        self.project_cfg.update(updates)

    def _ensure_previously_set(self, dist: "Distribution", field: str):
        """Raise (unless errors are ignored) when a dynamic ``field`` has no
        directive here and no value previously set on ``dist``."""
        previous = _PREVIOUSLY_DEFINED[field](dist)
        if previous is None and not self.ignore_option_errors:
            msg = (
                f"No configuration found for dynamic {field!r}.\n"
                "Some dynamic fields need to be specified via `tool.setuptools.dynamic`"
                "\nothers must be specified via the equivalent attribute in `setup.py`."
            )
            raise OptionError(msg)

    def _expand_directive(
        self, specifier: str, directive, package_dir: Mapping[str, str]
    ):
        """Evaluate a ``{"file": ...}`` or ``{"attr": ...}`` directive."""
        from setuptools.extern.more_itertools import always_iterable  # type: ignore

        with _ignore_errors(self.ignore_option_errors):
            root_dir = self.root_dir
            if "file" in directive:
                # Track the referenced files so they end up in the manifest.
                self._referenced_files.update(always_iterable(directive["file"]))
                return _expand.read_files(directive["file"], root_dir)
            if "attr" in directive:
                return _expand.read_attr(directive["attr"], package_dir, root_dir)
            raise ValueError(f"invalid `{specifier}`: {directive!r}")
        return None  # only reached when ``_ignore_errors`` swallowed an exception

    def _obtain(self, dist: "Distribution", field: str, package_dir: Mapping[str, str]):
        """Return the expanded value for ``field`` from ``tool.setuptools.dynamic``,
        or ``None`` after checking it was previously set elsewhere."""
        if field in self.dynamic_cfg:
            return self._expand_directive(
                f"tool.setuptools.dynamic.{field}",
                self.dynamic_cfg[field],
                package_dir,
            )
        self._ensure_previously_set(dist, field)
        return None

    def _obtain_version(self, dist: "Distribution", package_dir: Mapping[str, str]):
        # Since plugins can set version, let's silently skip if it cannot be obtained
        if "version" in self.dynamic and "version" in self.dynamic_cfg:
            return _expand.version(self._obtain(dist, "version", package_dir))
        return None

    def _obtain_readme(self, dist: "Distribution") -> Optional[Dict[str, str]]:
        """Return ``{"text": ..., "content-type": ...}`` for a dynamic readme."""
        if "readme" not in self.dynamic:
            return None

        dynamic_cfg = self.dynamic_cfg
        if "readme" in dynamic_cfg:
            return {
                "text": self._obtain(dist, "readme", {}),
                "content-type": dynamic_cfg["readme"].get("content-type", "text/x-rst"),
            }

        self._ensure_previously_set(dist, "readme")
        return None

    def _obtain_entry_points(
        self, dist: "Distribution", package_dir: Mapping[str, str]
    ) -> Optional[Dict[str, dict]]:
        """Expand dynamic entry-points; ``console_scripts``/``gui_scripts`` groups
        are split out into their own fields when appropriate."""
        fields = ("entry-points", "scripts", "gui-scripts")
        if not any(field in self.dynamic for field in fields):
            return None

        text = self._obtain(dist, "entry-points", package_dir)
        if text is None:
            return None

        groups = _expand.entry_points(text)
        expanded = {"entry-points": groups}

        def _set_scripts(field: str, group: str):
            # Move the group out of ``entry-points`` into its own field,
            # warning when that field was not declared dynamic.
            if group in groups:
                value = groups.pop(group)
                if field not in self.dynamic:
                    msg = _WouldIgnoreField.message(field, value)
                    warnings.warn(msg, _WouldIgnoreField)
                # TODO: Don't set field when support for pyproject.toml stabilizes
                #       instead raise an error as specified in PEP 621
                expanded[field] = value

        _set_scripts("scripts", "console_scripts")
        _set_scripts("gui-scripts", "gui_scripts")

        return expanded

    def _obtain_classifiers(self, dist: "Distribution"):
        """Expand dynamic classifiers (one classifier per line)."""
        if "classifiers" in self.dynamic:
            value = self._obtain(dist, "classifiers", {})
            if value:
                return value.splitlines()
        return None

    def _obtain_dependencies(self, dist: "Distribution"):
        """Expand dynamic dependencies (requirements-file-like text)."""
        if "dependencies" in self.dynamic:
            value = self._obtain(dist, "dependencies", {})
            if value:
                return _parse_requirements_list(value)
        return None

    def _obtain_optional_dependencies(self, dist: "Distribution"):
        """Expand dynamic optional-dependencies, one directive per extra group."""
        if "optional-dependencies" not in self.dynamic:
            return None
        if "optional-dependencies" in self.dynamic_cfg:
            optional_dependencies_map = self.dynamic_cfg["optional-dependencies"]
            assert isinstance(optional_dependencies_map, dict)
            return {
                group: _parse_requirements_list(self._expand_directive(
                    f"tool.setuptools.dynamic.optional-dependencies.{group}",
                    directive,
                    {},
                ))
                for group, directive in optional_dependencies_map.items()
            }
        self._ensure_previously_set(dist, "optional-dependencies")
        return None
|  | ||||
|  | ||||
| def _parse_requirements_list(value): | ||||
|     return [ | ||||
|         line | ||||
|         for line in value.splitlines() | ||||
|         if line.strip() and not line.strip().startswith("#") | ||||
|     ] | ||||
|  | ||||
|  | ||||
| @contextmanager | ||||
| def _ignore_errors(ignore_option_errors: bool): | ||||
|     if not ignore_option_errors: | ||||
|         yield | ||||
|         return | ||||
|  | ||||
|     try: | ||||
|         yield | ||||
|     except Exception as ex: | ||||
|         _logger.debug(f"ignored error: {ex.__class__.__name__} - {ex}") | ||||
|  | ||||
|  | ||||
class _EnsurePackagesDiscovered(_expand.EnsurePackagesDiscovered):
    """Specialisation of :class:`expand.EnsurePackagesDiscovered` that keeps the
    ``pyproject.toml`` configuration dicts and the distribution object in sync
    while auto-discovery may run.
    """

    def __init__(
        self, distribution: "Distribution", project_cfg: dict, setuptools_cfg: dict
    ):
        super().__init__(distribution)
        self._project_cfg = project_cfg
        self._setuptools_cfg = setuptools_cfg

    def __enter__(self):
        """When entering the context, the values of ``packages``, ``py_modules`` and
        ``package_dir`` that are missing in ``dist`` are copied from ``setuptools_cfg``.
        """
        dist, cfg = self._dist, self._setuptools_cfg
        package_dir: Dict[str, str] = cfg.setdefault("package-dir", {})
        package_dir.update(dist.package_dir or {})
        dist.package_dir = package_dir  # needs to be the same object

        dist.set_defaults._ignore_ext_modules()  # pyproject.toml-specific behaviour

        # Set `name`, `py_modules` and `packages` in dist to short-circuit
        # auto-discovery, but avoid overwriting empty lists purposefully set by users.
        if dist.metadata.name is None:
            dist.metadata.name = self._project_cfg.get("name")
        if dist.py_modules is None:
            dist.py_modules = cfg.get("py-modules")
        if dist.packages is None:
            dist.packages = cfg.get("packages")

        return super().__enter__()

    def __exit__(self, exc_type, exc_value, traceback):
        """When exiting the context, if values of ``packages``, ``py_modules`` and
        ``package_dir`` are missing in ``setuptools_cfg``, copy from ``dist``.
        """
        # If anything was discovered set them back, so they count in the final config.
        self._setuptools_cfg.setdefault("packages", self._dist.packages)
        self._setuptools_cfg.setdefault("py-modules", self._dist.py_modules)
        return super().__exit__(exc_type, exc_value, traceback)
|  | ||||
|  | ||||
class _BetaConfiguration(UserWarning):
    """Explicitly inform users that some `pyproject.toml` configuration is *beta*.

    Emitted while reading the configuration whenever the ``[tool.setuptools]``
    table is present (support for it is not considered stable yet).
    """
|  | ||||
|  | ||||
class _InvalidFile(UserWarning):
    """The given `pyproject.toml` file is invalid and would be ignored.
    !!\n\n
    ############################
    # Invalid `pyproject.toml` #
    ############################

    Any configurations in `pyproject.toml` will be ignored.
    Please note that future releases of setuptools will halt the build process
    if an invalid file is given.

    To prevent setuptools from considering `pyproject.toml` please
    DO NOT include the `[project]` or `[tool.setuptools]` tables in your file.
    \n\n!!
    """

    # NOTE: the class docstring doubles as the user-facing warning text (see
    # ``message`` below), so rewording it changes runtime behaviour.
    @classmethod
    def message(cls):
        """Return the class docstring, cleaned up, as the warning message."""
        from inspect import cleandoc
        return cleandoc(cls.__doc__)
							
								
								
									
										769
									
								
								venv/lib/python3.11/site-packages/setuptools/config/setupcfg.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										769
									
								
								venv/lib/python3.11/site-packages/setuptools/config/setupcfg.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,769 @@ | ||||
| """ | ||||
| Load setuptools configuration from ``setup.cfg`` files. | ||||
|  | ||||
| **API will be made private in the future** | ||||
| """ | ||||
| import os | ||||
|  | ||||
| import contextlib | ||||
| import functools | ||||
| import warnings | ||||
| from collections import defaultdict | ||||
| from functools import partial | ||||
| from functools import wraps | ||||
| from typing import (TYPE_CHECKING, Callable, Any, Dict, Generic, Iterable, List, | ||||
|                     Optional, Set, Tuple, TypeVar, Union) | ||||
|  | ||||
| from distutils.errors import DistutilsOptionError, DistutilsFileError | ||||
| from setuptools.extern.packaging.requirements import Requirement, InvalidRequirement | ||||
| from setuptools.extern.packaging.version import Version, InvalidVersion | ||||
| from setuptools.extern.packaging.specifiers import SpecifierSet | ||||
| from setuptools._deprecation_warning import SetuptoolsDeprecationWarning | ||||
|  | ||||
| from . import expand | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from setuptools.dist import Distribution  # noqa | ||||
|     from distutils.dist import DistributionMetadata  # noqa | ||||
|  | ||||
# Type aliases shared across this module.
_Path = Union[str, os.PathLike]  # anything accepted by os.path functions
SingleCommandOptions = Dict["str", Tuple["str", Any]]
"""Dict that associates the name of the options of a particular command to a
tuple. The first element of the tuple indicates the origin of the option value
(e.g. the name of the configuration file where it was read from),
while the second element of the tuple is the option value itself.
"""
AllCommandOptions = Dict["str", SingleCommandOptions]  # cmd name => its options
# Handlers below target either the whole Distribution or just its metadata.
Target = TypeVar("Target", bound=Union["Distribution", "DistributionMetadata"])
|  | ||||
|  | ||||
def read_configuration(
    filepath: _Path,
    find_others=False,
    ignore_option_errors=False
) -> dict:
    """Parse the given configuration file and return its options as a dict.

    :param str|unicode filepath: path to the configuration file to read
        options from.

    :param bool find_others: also search the usual locations for other
        configuration files and take them into account.

    :param bool ignore_option_errors: silently skip options whose values
        cannot be resolved (e.g. due to exceptions in directives such as
        ``file:``, ``attr:``, etc.). When ``False``, such exceptions
        propagate to the caller.

    :rtype: dict
    """
    from setuptools.dist import Distribution

    dist = Distribution()
    extra_files = dist.find_config_files() if find_others else []
    parsed_handlers = _apply(dist, filepath, extra_files, ignore_option_errors)
    return configuration_to_dict(parsed_handlers)
|  | ||||
|  | ||||
def apply_configuration(dist: "Distribution", filepath: _Path) -> "Distribution":
    """Apply the configuration from a ``setup.cfg`` file into an existing
    distribution object.

    :param Distribution dist: distribution object to update in place.
    :param filepath: path to the ``setup.cfg`` file.
    :return: the same ``dist`` object, for convenience.
    """
    _apply(dist, filepath)
    # Re-derive requirement-related metadata now that options were applied.
    dist._finalize_requires()
    return dist
|  | ||||
|  | ||||
def _apply(
    dist: "Distribution", filepath: _Path,
    other_files: Iterable[_Path] = (),
    ignore_option_errors: bool = False,
) -> Tuple["ConfigHandler", ...]:
    """Parse ``filepath`` (plus ``other_files``) and apply the result to ``dist``.

    Returns the tuple of option handlers that performed the parsing.
    """
    from setuptools.dist import _Distribution

    filepath = os.path.abspath(filepath)
    if not os.path.isfile(filepath):
        raise DistutilsFileError('Configuration file %s does not exist.' % filepath)

    # Relative paths inside the config file are resolved against its directory,
    # so temporarily switch the working directory while parsing.
    previous_cwd = os.getcwd()
    os.chdir(os.path.dirname(filepath))
    all_files = [*other_files, filepath]

    try:
        _Distribution.parse_config_files(dist, filenames=all_files)
        parsed_handlers = parse_configuration(
            dist, dist.command_options, ignore_option_errors=ignore_option_errors
        )
        dist._finalize_license_files()
    finally:
        os.chdir(previous_cwd)

    return parsed_handlers
|  | ||||
|  | ||||
def _get_option(target_obj: Target, key: str):
    """
    Fetch option ``key`` from ``target_obj``, preferring a ``get_<key>``
    accessor method when the object defines one and falling back to reading
    the attribute directly otherwise.
    """
    read_attribute = functools.partial(getattr, target_obj, key)
    accessor = getattr(target_obj, f'get_{key}', read_attribute)
    return accessor()
|  | ||||
|  | ||||
def configuration_to_dict(handlers: Tuple["ConfigHandler", ...]) -> dict:
    """Collect the configuration data gathered by ``handlers`` into a dict.

    :param list[ConfigHandler] handlers: handlers list,
        usually from parse_configuration()

    :rtype: dict
    """
    result: dict = defaultdict(dict)

    for config_handler in handlers:
        section = result[config_handler.section_prefix]
        for option_name in config_handler.set_options:
            section[option_name] = _get_option(
                config_handler.target_obj, option_name
            )

    return result
|  | ||||
|  | ||||
def parse_configuration(
    distribution: "Distribution",
    command_options: AllCommandOptions,
    ignore_option_errors=False
) -> Tuple["ConfigMetadataHandler", "ConfigOptionsHandler"]:
    """Performs additional parsing of configuration options
    for a distribution.

    Returns a list of used option handlers.

    :param Distribution distribution:
    :param dict command_options:
    :param bool ignore_option_errors: Whether to silently ignore
        options, values of which could not be resolved (e.g. due to exceptions
        in directives such as file:, attr:, etc.).
        If False exceptions are propagated as expected.
    :rtype: list
    """
    with expand.EnsurePackagesDiscovered(distribution) as ensure_discovered:
        options = ConfigOptionsHandler(
            distribution,
            command_options,
            ignore_option_errors,
            ensure_discovered,
        )

        # NOTE: [options] must be parsed before [metadata] so that
        # `packages: find:` can populate `package_dir`, which the metadata
        # handler needs to resolve `attr:` directives.
        options.parse()
        if not distribution.package_dir:
            distribution.package_dir = options.package_dir  # Filled by `find_packages`

        meta = ConfigMetadataHandler(
            distribution.metadata,
            command_options,
            ignore_option_errors,
            ensure_discovered,
            distribution.package_dir,
            distribution.src_root,
        )
        meta.parse()
        # Record every file pulled in via `file:` so build backends can treat
        # them as inputs of the build.
        distribution._referenced_files.update(
            options._referenced_files, meta._referenced_files
        )

    return meta, options
|  | ||||
|  | ||||
| def _warn_accidental_env_marker_misconfig(label: str, orig_value: str, parsed: list): | ||||
|     """Because users sometimes misinterpret this configuration: | ||||
|  | ||||
|     [options.extras_require] | ||||
|     foo = bar;python_version<"4" | ||||
|  | ||||
|     It looks like one requirement with an environment marker | ||||
|     but because there is no newline, it's parsed as two requirements | ||||
|     with a semicolon as separator. | ||||
|  | ||||
|     Therefore, if: | ||||
|         * input string does not contain a newline AND | ||||
|         * parsed result contains two requirements AND | ||||
|         * parsing of the two parts from the result ("<first>;<second>") | ||||
|         leads in a valid Requirement with a valid marker | ||||
|     a UserWarning is shown to inform the user about the possible problem. | ||||
|     """ | ||||
|     if "\n" in orig_value or len(parsed) != 2: | ||||
|         return | ||||
|  | ||||
|     with contextlib.suppress(InvalidRequirement): | ||||
|         original_requirements_str = ";".join(parsed) | ||||
|         req = Requirement(original_requirements_str) | ||||
|         if req.marker is not None: | ||||
|             msg = ( | ||||
|                 f"One of the parsed requirements in `{label}` " | ||||
|                 f"looks like a valid environment marker: '{parsed[1]}'\n" | ||||
|                 "Make sure that the config is correct and check " | ||||
|                 "https://setuptools.pypa.io/en/latest/userguide/declarative_config.html#opt-2"  # noqa: E501 | ||||
|             ) | ||||
|             warnings.warn(msg, UserWarning) | ||||
|  | ||||
|  | ||||
class ConfigHandler(Generic[Target]):
    """Handles metadata supplied in configuration files."""

    section_prefix: str
    """Prefix for config sections handled by this handler.
    Must be provided by subclasses.

    """

    aliases: Dict[str, str] = {}
    """Options aliases.
    For compatibility with various packages. E.g.: d2to1 and pbr.
    Note: `-` in keys is replaced with `_` by config parser.

    """

    def __init__(
        self,
        target_obj: Target,
        options: AllCommandOptions,
        ignore_option_errors,
        ensure_discovered: expand.EnsurePackagesDiscovered,
    ):
        # Keep only the sections relevant to this handler, stripping the
        # handler prefix (e.g. "options.extras_require" -> "extras_require";
        # the bare prefix section itself maps to the empty string).
        sections: AllCommandOptions = {}

        section_prefix = self.section_prefix
        for section_name, section_options in options.items():
            if not section_name.startswith(section_prefix):
                continue

            section_name = section_name.replace(section_prefix, '').strip('.')
            sections[section_name] = section_options

        self.ignore_option_errors = ignore_option_errors
        self.target_obj = target_obj
        self.sections = sections
        self.set_options: List[str] = []  # names of options actually applied
        self.ensure_discovered = ensure_discovered
        self._referenced_files: Set[str] = set()
        """After parsing configurations, this property will enumerate
        all files referenced by the "file:" directive. Private API for setuptools only.
        """

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        raise NotImplementedError(
            '%s must provide .parsers property' % self.__class__.__name__
        )

    def __setitem__(self, option_name, value):
        # Sentinel distinguishing "attribute missing" from a falsy value.
        unknown = tuple()
        target_obj = self.target_obj

        # Translate alias into real name.
        option_name = self.aliases.get(option_name, option_name)

        current_value = getattr(target_obj, option_name, unknown)

        if current_value is unknown:
            raise KeyError(option_name)

        if current_value:
            # Already inhabited. Skipping.
            return

        skip_option = False
        parser = self.parsers.get(option_name)
        if parser:
            try:
                value = parser(value)

            except Exception:
                # Swallow the parser failure only when errors are ignored.
                skip_option = True
                if not self.ignore_option_errors:
                    raise

        if skip_option:
            return

        # Prefer a dedicated `set_<option>` setter when the target defines one.
        setter = getattr(target_obj, 'set_%s' % option_name, None)
        if setter is None:
            setattr(target_obj, option_name, value)
        else:
            setter(value)

        self.set_options.append(option_name)

    @classmethod
    def _parse_list(cls, value, separator=','):
        """Represents value as a list.

        Value is split either by separator (defaults to comma) or by lines.

        :param value:
        :param separator: List items separator character.
        :rtype: list
        """
        if isinstance(value, list):  # _get_parser_compound case
            return value

        if '\n' in value:
            value = value.splitlines()
        else:
            value = value.split(separator)

        return [chunk.strip() for chunk in value if chunk.strip()]

    @classmethod
    def _parse_dict(cls, value):
        """Represents value as a dict.

        Each list item must look like ``key = value``; anything else raises.

        :param value:
        :rtype: dict
        """
        separator = '='
        result = {}
        for line in cls._parse_list(value):
            key, sep, val = line.partition(separator)
            if sep != separator:
                raise DistutilsOptionError(
                    'Unable to parse option value to dict: %s' % value
                )
            result[key.strip()] = val.strip()

        return result

    @classmethod
    def _parse_bool(cls, value):
        """Represents value as boolean.

        Only '1', 'true' and 'yes' (case-insensitive) count as True.

        :param value:
        :rtype: bool
        """
        value = value.lower()
        return value in ('1', 'true', 'yes')

    @classmethod
    def _exclude_files_parser(cls, key):
        """Returns a parser function to make sure field inputs
        are not files.

        Parses a value after getting the key so error messages are
        more informative.

        :param key:
        :rtype: callable
        """

        def parser(value):
            exclude_directive = 'file:'
            if value.startswith(exclude_directive):
                raise ValueError(
                    'Only strings are accepted for the {0} field, '
                    'files are not accepted'.format(key)
                )
            return value

        return parser

    def _parse_file(self, value, root_dir: _Path):
        """Represents value as a string, allowing including text
        from nearest files using `file:` directive.

        Directive is sandboxed and won't reach anything outside
        directory with setup.py.

        Examples:
            file: README.rst, CHANGELOG.md, src/file.txt

        :param str value:
        :rtype: str
        """
        include_directive = 'file:'

        if not isinstance(value, str):
            return value

        if not value.startswith(include_directive):
            return value

        # Track referenced files so they can be reported as build inputs.
        spec = value[len(include_directive) :]
        filepaths = [path.strip() for path in spec.split(',')]
        self._referenced_files.update(filepaths)
        return expand.read_files(filepaths, root_dir)

    def _parse_attr(self, value, package_dir, root_dir: _Path):
        """Represents value as a module attribute.

        Examples:
            attr: package.attr
            attr: package.module.attr

        :param str value:
        :rtype: str
        """
        attr_directive = 'attr:'
        if not value.startswith(attr_directive):
            return value

        attr_desc = value.replace(attr_directive, '')

        # Make sure package_dir is populated correctly, so `attr:` directives can work
        package_dir.update(self.ensure_discovered.package_dir)
        return expand.read_attr(attr_desc, package_dir, root_dir)

    @classmethod
    def _get_parser_compound(cls, *parse_methods):
        """Returns parser function to represents value as a list.

        Parses a value applying given methods one after another
        (left to right, each consuming the previous result).

        :param parse_methods:
        :rtype: callable
        """

        def parse(value):
            parsed = value

            for method in parse_methods:
                parsed = method(parsed)

            return parsed

        return parse

    @classmethod
    def _parse_section_to_dict_with_key(cls, section_options, values_parser):
        """Parses section options into a dictionary.

        Applies a given parser to each option in a section.

        :param dict section_options:
        :param callable values_parser: function with 2 args corresponding to key, value
        :rtype: dict
        """
        value = {}
        # Each option maps to a (origin, value) tuple; the origin is dropped.
        for key, (_, val) in section_options.items():
            value[key] = values_parser(key, val)
        return value

    @classmethod
    def _parse_section_to_dict(cls, section_options, values_parser=None):
        """Parses section options into a dictionary.

        Optionally applies a given parser to each value.

        :param dict section_options:
        :param callable values_parser: function with 1 arg corresponding to option value
        :rtype: dict
        """
        parser = (lambda _, v: values_parser(v)) if values_parser else (lambda _, v: v)
        return cls._parse_section_to_dict_with_key(section_options, parser)

    def parse_section(self, section_options):
        """Parses configuration file section.

        :param dict section_options:
        """
        for (name, (_, value)) in section_options.items():
            with contextlib.suppress(KeyError):
                # Keep silent for a new option may appear anytime.
                self[name] = value

    def parse(self):
        """Parses configuration file items from one
        or more related sections.

        Dispatches each section to a ``parse_section*`` method; an unknown
        section name raises ``DistutilsOptionError``.
        """
        for section_name, section_options in self.sections.items():

            method_postfix = ''
            if section_name:  # [section.option] variant
                method_postfix = '_%s' % section_name

            section_parser_method: Optional[Callable] = getattr(
                self,
                # Dots in section names are translated into dunderscores.
                ('parse_section%s' % method_postfix).replace('.', '__'),
                None,
            )

            if section_parser_method is None:
                raise DistutilsOptionError(
                    'Unsupported distribution option section: [%s.%s]'
                    % (self.section_prefix, section_name)
                )

            section_parser_method(section_options)

    def _deprecated_config_handler(self, func, msg, warning_class):
        """this function will wrap around parameters that are deprecated

        :param msg: deprecation message
        :param warning_class: class of warning exception to be raised
        :param func: function to be wrapped around
        """

        @wraps(func)
        def config_handler(*args, **kwargs):
            # Emit the deprecation warning on every use of the option.
            warnings.warn(msg, warning_class)
            return func(*args, **kwargs)

        return config_handler
|  | ||||
|  | ||||
class ConfigMetadataHandler(ConfigHandler["DistributionMetadata"]):
    """Handler for the ``[metadata]`` section of ``setup.cfg``."""

    section_prefix = 'metadata'

    aliases = {
        'home_page': 'url',
        'summary': 'description',
        'classifier': 'classifiers',
        'platform': 'platforms',
    }

    strict_mode = False
    """We need to keep it loose, to be partially compatible with
    `pbr` and `d2to1` packages which also uses `metadata` section.

    """

    def __init__(
        self,
        target_obj: "DistributionMetadata",
        options: AllCommandOptions,
        ignore_option_errors: bool,
        ensure_discovered: expand.EnsurePackagesDiscovered,
        package_dir: Optional[dict] = None,
        root_dir: _Path = os.curdir
    ):
        super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
        self.package_dir = package_dir
        self.root_dir = root_dir

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        parse_list = self._parse_list
        parse_file = partial(self._parse_file, root_dir=self.root_dir)
        parse_dict = self._parse_dict
        exclude_files_parser = self._exclude_files_parser

        return {
            'platforms': parse_list,
            'keywords': parse_list,
            'provides': parse_list,
            'requires': self._deprecated_config_handler(
                parse_list,
                "The requires parameter is deprecated, please use "
                "install_requires for runtime dependencies.",
                SetuptoolsDeprecationWarning,
            ),
            'obsoletes': parse_list,
            'classifiers': self._get_parser_compound(parse_file, parse_list),
            'license': exclude_files_parser('license'),
            'license_file': self._deprecated_config_handler(
                exclude_files_parser('license_file'),
                "The license_file parameter is deprecated, "
                "use license_files instead.",
                SetuptoolsDeprecationWarning,
            ),
            'license_files': parse_list,
            'description': parse_file,
            'long_description': parse_file,
            'version': self._parse_version,
            'project_urls': parse_dict,
        }

    def _parse_version(self, value):
        """Parses `version` option value.

        :param value:
        :rtype: str

        """
        version = self._parse_file(value, self.root_dir)

        if version != value:
            # Value was loaded through a `file:` directive.
            version = version.strip()
            # Be strict about versions loaded from file because it's easy to
            # accidentally include newlines and other unintended content
            try:
                Version(version)
            except InvalidVersion:
                tmpl = (
                    'Version loaded from {value} does not '
                    'comply with PEP 440: {version}'
                )
                raise DistutilsOptionError(tmpl.format(**locals()))

            return version

        # Otherwise the value may be an `attr:` directive (or a plain string).
        return expand.version(self._parse_attr(value, self.package_dir, self.root_dir))
|  | ||||
|  | ||||
class ConfigOptionsHandler(ConfigHandler["Distribution"]):
    """Handler for the ``[options]`` sections of ``setup.cfg``."""

    section_prefix = 'options'

    def __init__(
        self,
        target_obj: "Distribution",
        options: AllCommandOptions,
        ignore_option_errors: bool,
        ensure_discovered: expand.EnsurePackagesDiscovered,
    ):
        super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
        self.root_dir = target_obj.src_root
        self.package_dir: Dict[str, str] = {}  # To be filled by `find_packages`

    @classmethod
    def _parse_list_semicolon(cls, value):
        # Requirement lists use ';' as separator (commas may appear in specs).
        return cls._parse_list(value, separator=';')

    def _parse_file_in_root(self, value):
        # Resolve `file:` directives relative to the project root.
        return self._parse_file(value, root_dir=self.root_dir)

    def _parse_requirements_list(self, label: str, value: str):
        # Parse a requirements list, either by reading in a `file:`, or a list.
        parsed = self._parse_list_semicolon(self._parse_file_in_root(value))
        _warn_accidental_env_marker_misconfig(label, value, parsed)
        # Filter it to only include lines that are not comments. `parse_list`
        # will have stripped each line and filtered out empties.
        return [line for line in parsed if not line.startswith("#")]

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        parse_list = self._parse_list
        parse_bool = self._parse_bool
        parse_dict = self._parse_dict
        parse_cmdclass = self._parse_cmdclass

        return {
            'zip_safe': parse_bool,
            'include_package_data': parse_bool,
            'package_dir': parse_dict,
            'scripts': parse_list,
            'eager_resources': parse_list,
            'dependency_links': parse_list,
            'namespace_packages': self._deprecated_config_handler(
                parse_list,
                "The namespace_packages parameter is deprecated, "
                "consider using implicit namespaces instead (PEP 420).",
                SetuptoolsDeprecationWarning,
            ),
            'install_requires': partial(
                self._parse_requirements_list, "install_requires"
            ),
            'setup_requires': self._parse_list_semicolon,
            'tests_require': self._parse_list_semicolon,
            'packages': self._parse_packages,
            'entry_points': self._parse_file_in_root,
            'py_modules': parse_list,
            'python_requires': SpecifierSet,
            'cmdclass': parse_cmdclass,
        }

    def _parse_cmdclass(self, value):
        # Resolve "name = dotted.path" pairs into actual command classes.
        package_dir = self.ensure_discovered.package_dir
        return expand.cmdclass(self._parse_dict(value), package_dir, self.root_dir)

    def _parse_packages(self, value):
        """Parses `packages` option value.

        Supports the `find:` and `find_namespace:` directives in addition
        to a literal list of package names.

        :param value:
        :rtype: list
        """
        find_directives = ['find:', 'find_namespace:']
        trimmed_value = value.strip()

        if trimmed_value not in find_directives:
            return self._parse_list(value)

        # Read function arguments from a dedicated section.
        find_kwargs = self.parse_section_packages__find(
            self.sections.get('packages.find', {})
        )

        find_kwargs.update(
            namespaces=(trimmed_value == find_directives[1]),
            root_dir=self.root_dir,
            fill_package_dir=self.package_dir,
        )

        return expand.find_packages(**find_kwargs)

    def parse_section_packages__find(self, section_options):
        """Parses `packages.find` configuration file section.

        To be used in conjunction with _parse_packages().

        :param dict section_options:
        """
        section_data = self._parse_section_to_dict(section_options, self._parse_list)

        valid_keys = ['where', 'include', 'exclude']

        # Drop unknown keys and empty values before forwarding as kwargs.
        find_kwargs = dict(
            [(k, v) for k, v in section_data.items() if k in valid_keys and v]
        )

        where = find_kwargs.get('where')
        if where is not None:
            find_kwargs['where'] = where[0]  # cast list to single val

        return find_kwargs

    def parse_section_entry_points(self, section_options):
        """Parses `entry_points` configuration file section.

        :param dict section_options:
        """
        parsed = self._parse_section_to_dict(section_options, self._parse_list)
        self['entry_points'] = parsed

    def _parse_package_data(self, section_options):
        # Shared implementation for (exclude_)package_data sections.
        package_data = self._parse_section_to_dict(section_options, self._parse_list)
        return expand.canonic_package_data(package_data)

    def parse_section_package_data(self, section_options):
        """Parses `package_data` configuration file section.

        :param dict section_options:
        """
        self['package_data'] = self._parse_package_data(section_options)

    def parse_section_exclude_package_data(self, section_options):
        """Parses `exclude_package_data` configuration file section.

        :param dict section_options:
        """
        self['exclude_package_data'] = self._parse_package_data(section_options)

    def parse_section_extras_require(self, section_options):
        """Parses `extras_require` configuration file section.

        :param dict section_options:
        """
        parsed = self._parse_section_to_dict_with_key(
            section_options,
            lambda k, v: self._parse_requirements_list(f"extras_require[{k}]", v)
        )

        self['extras_require'] = parsed

    def parse_section_data_files(self, section_options):
        """Parses `data_files` configuration file section.

        :param dict section_options:
        """
        parsed = self._parse_section_to_dict(section_options, self._parse_list)
        self['data_files'] = expand.canonic_data_files(parsed, self.root_dir)
		Reference in New Issue
	
	Block a user