test
This commit is contained in:
		| @ -0,0 +1,34 @@ | ||||
| from functools import reduce | ||||
| from typing import Any, Callable, Dict | ||||
|  | ||||
| from . import formats | ||||
| from .error_reporting import detailed_errors, ValidationError | ||||
| from .extra_validations import EXTRA_VALIDATIONS | ||||
| from .fastjsonschema_exceptions import JsonSchemaException, JsonSchemaValueException | ||||
| from .fastjsonschema_validations import validate as _validate | ||||
|  | ||||
__all__ = [
    "validate",
    "FORMAT_FUNCTIONS",
    "EXTRA_VALIDATIONS",
    "ValidationError",
    "JsonSchemaException",
    "JsonSchemaValueException",
]


# Map of JSON Schema "format" names to validator callables.
# Every public callable in ``formats`` is exposed, with underscores in its
# name replaced by dashes (e.g. ``pep508_identifier`` -> "pep508-identifier");
# private helpers (``_``-prefixed) are skipped.
FORMAT_FUNCTIONS: Dict[str, Callable[[str], bool]] = {
    fn.__name__.replace("_", "-"): fn
    for fn in formats.__dict__.values()
    if callable(fn) and not fn.__name__.startswith("_")
}
|  | ||||
|  | ||||
def validate(data: Any) -> bool:
    """Validate the given ``data`` object using JSON Schema.

    Returns ``True`` when ``data`` is valid and raises ``ValidationError``
    (via ``detailed_errors``) when it is not.
    """
    with detailed_errors():
        _validate(data, custom_formats=FORMAT_FUNCTIONS)
    # Run the extra (non-JSON-Schema) validations, chaining each result into
    # the next one.  An explicit loop replaces the previous
    # ``reduce(lambda acc, fn: fn(acc), ...)``, which abused ``reduce`` for
    # side effects; the chaining semantics are preserved.
    checked = data
    for extra_validation in EXTRA_VALIDATIONS:
        checked = extra_validation(checked)
    return True
| @ -0,0 +1,318 @@ | ||||
| import io | ||||
| import json | ||||
| import logging | ||||
| import os | ||||
| import re | ||||
| from contextlib import contextmanager | ||||
| from textwrap import indent, wrap | ||||
| from typing import Any, Dict, Iterator, List, Optional, Sequence, Union, cast | ||||
|  | ||||
| from .fastjsonschema_exceptions import JsonSchemaValueException | ||||
|  | ||||
| _logger = logging.getLogger(__name__) | ||||
|  | ||||
| _MESSAGE_REPLACEMENTS = { | ||||
|     "must be named by propertyName definition": "keys must be named by", | ||||
|     "one of contains definition": "at least one item that matches", | ||||
|     " same as const definition:": "", | ||||
|     "only specified items": "only items matching the definition", | ||||
| } | ||||
|  | ||||
| _SKIP_DETAILS = ( | ||||
|     "must not be empty", | ||||
|     "is always invalid", | ||||
|     "must not be there", | ||||
| ) | ||||
|  | ||||
| _NEED_DETAILS = {"anyOf", "oneOf", "anyOf", "contains", "propertyNames", "not", "items"} | ||||
|  | ||||
| _CAMEL_CASE_SPLITTER = re.compile(r"\W+|([A-Z][^A-Z\W]*)") | ||||
| _IDENTIFIER = re.compile(r"^[\w_]+$", re.I) | ||||
|  | ||||
| _TOML_JARGON = { | ||||
|     "object": "table", | ||||
|     "property": "key", | ||||
|     "properties": "keys", | ||||
|     "property names": "keys", | ||||
| } | ||||
|  | ||||
|  | ||||
class ValidationError(JsonSchemaValueException):
    """Report violations of a given JSON schema.

    This class extends :exc:`~fastjsonschema.JsonSchemaValueException`
    with two extra properties:

    - ``summary``: a cleaned-up version of the original error message
      containing only the necessary information.

    - ``details``: contextual information such as the failing sub-schema and
      the offending value.

    The exception message is just ``summary`` by default; when the effective
    ``logging`` level is :obj:`logging.DEBUG` it also includes ``details``.
    """

    summary = ""
    details = ""
    _original_message = ""

    @classmethod
    def _from_jsonschema(cls, ex: JsonSchemaValueException):
        """Build a rich ``ValidationError`` out of a raw fastjsonschema error."""
        formatting = _ErrorFormatting(ex)
        error = cls(str(formatting), ex.value, formatting.name, ex.definition, ex.rule)
        debug_flag = os.getenv("JSONSCHEMA_DEBUG_CODE_GENERATION", "false").lower()
        if debug_flag != "false":  # pragma: no cover
            # Preserve the low-level traceback for debugging generated code.
            error.__cause__, error.__traceback__ = ex.__cause__, ex.__traceback__
        error._original_message = ex.message
        error.summary = formatting.summary
        error.details = formatting.details
        return error
|  | ||||
|  | ||||
@contextmanager
def detailed_errors():
    """Context manager converting low-level ``JsonSchemaValueException``s
    raised inside the ``with`` block into user-friendly ``ValidationError``s.

    ``from None`` suppresses the original exception context so users see a
    single, clean error.
    """
    try:
        yield
    except JsonSchemaValueException as ex:
        raise ValidationError._from_jsonschema(ex) from None
|  | ||||
|  | ||||
class _ErrorFormatting:
    """Derive a user-friendly ``summary``/``details`` pair from a raw
    ``JsonSchemaValueException``."""

    def __init__(self, ex: JsonSchemaValueException):
        self.ex = ex
        self.name = f"`{self._simplify_name(ex.name)}`"
        # Replace the raw "data.*" path with the simplified, backticked name.
        self._original_message = self.ex.message.replace(ex.name, self.name)
        self._summary = ""
        self._details = ""

    def __str__(self) -> str:
        # Include the verbose details only when debug logging is enabled.
        if _logger.getEffectiveLevel() <= logging.DEBUG and self.details:
            return f"{self.summary}\n\n{self.details}"

        return self.summary

    @property
    def summary(self) -> str:
        # Computed lazily and cached.
        if not self._summary:
            self._summary = self._expand_summary()

        return self._summary

    @property
    def details(self) -> str:
        # Computed lazily and cached.
        if not self._details:
            self._details = self._expand_details()

        return self._details

    def _simplify_name(self, name):
        # fastjsonschema names start with a "data." prefix; drop it.
        x = len("data.")
        return name[x:] if name.startswith("data.") else name

    def _expand_summary(self):
        """Rewrite the raw message and, when helpful, append the sub-schema."""
        msg = self._original_message

        for bad, repl in _MESSAGE_REPLACEMENTS.items():
            msg = msg.replace(bad, repl)

        if any(substring in msg for substring in _SKIP_DETAILS):
            return msg

        schema = self.ex.rule_definition
        if self.ex.rule in _NEED_DETAILS and schema:
            summary = _SummaryWriter(_TOML_JARGON)
            return f"{msg}:\n\n{indent(summary(schema), '    ')}"

        return msg

    def _expand_details(self) -> str:
        """Build the DESCRIPTION / GIVEN VALUE / OFFENDING RULE / DEFINITION text."""
        optional = []
        # Work on a shallow copy: the previous implementation popped the
        # description keys straight out of ``self.ex.definition``, mutating
        # the schema dict shared with the validator.  The rendered output is
        # identical; only the unwanted side effect is removed.
        definition = dict(self.ex.definition)
        desc_lines = definition.pop("$$description", [])
        desc = definition.pop("description", None) or " ".join(desc_lines)
        if desc:
            description = "\n".join(
                wrap(
                    desc,
                    width=80,
                    initial_indent="    ",
                    subsequent_indent="    ",
                    break_long_words=False,
                )
            )
            optional.append(f"DESCRIPTION:\n{description}")
        schema = json.dumps(definition, indent=4)
        value = json.dumps(self.ex.value, indent=4)
        defaults = [
            f"GIVEN VALUE:\n{indent(value, '    ')}",
            f"OFFENDING RULE: {self.ex.rule!r}",
            f"DEFINITION:\n{indent(schema, '    ')}",
        ]
        return "\n\n".join(optional + defaults)
|  | ||||
|  | ||||
class _SummaryWriter:
    """Render a JSON (sub-)schema as an indented, human-readable summary tree."""

    # Schema keys that are purely informative and never summarized.
    _IGNORE = {"description", "default", "title", "examples"}

    def __init__(self, jargon: Optional[Dict[str, str]] = None):
        # ``jargon`` translates JSON Schema vocabulary into user-facing terms
        # (e.g. ``_TOML_JARGON`` maps "property" -> "key").
        self.jargon: Dict[str, str] = jargon or {}
        # Clarify confusing terms
        self._terms = {
            "anyOf": "at least one of the following",
            "oneOf": "exactly one of the following",
            "allOf": "all of the following",
            "not": "(*NOT* the following)",
            "prefixItems": f"{self._jargon('items')} (in order)",
            "items": "items",
            "contains": "contains at least one of",
            "propertyNames": (
                f"non-predefined acceptable {self._jargon('property names')}"
            ),
            "patternProperties": f"{self._jargon('properties')} named via pattern",
            "const": "predefined value",
            "enum": "one of",
        }
        # Attributes that indicate that the definition is easy and can be done
        # inline (e.g. string and number)
        self._guess_inline_defs = [
            "enum",
            "const",
            "maxLength",
            "minLength",
            "pattern",
            "format",
            "minimum",
            "maximum",
            "exclusiveMinimum",
            "exclusiveMaximum",
            "multipleOf",
        ]

    def _jargon(self, term: Union[str, List[str]]) -> Union[str, List[str]]:
        """Translate a term (or list of terms) through ``self.jargon``."""
        if isinstance(term, list):
            return [self.jargon.get(t, t) for t in term]
        return self.jargon.get(term, term)

    def __call__(
        self,
        schema: Union[dict, List[dict]],
        prefix: str = "",
        *,
        _path: Sequence[str] = (),
    ) -> str:
        """Recursively render ``schema``.

        ``prefix`` is prepended to the first emitted line only; ``_path``
        tracks the keys leading to the current sub-schema.
        """
        if isinstance(schema, list):
            return self._handle_list(schema, prefix, _path)

        filtered = self._filter_unecessary(schema, _path)
        simple = self._handle_simple_dict(filtered, _path)
        if simple:
            return f"{prefix}{simple}"

        child_prefix = self._child_prefix(prefix, "  ")
        item_prefix = self._child_prefix(prefix, "- ")
        indent = len(prefix) * " "
        with io.StringIO() as buffer:
            for i, (key, value) in enumerate(filtered.items()):
                child_path = [*_path, key]
                line_prefix = prefix if i == 0 else indent
                buffer.write(f"{line_prefix}{self._label(child_path)}:")
                # ^  just the first item should receive the complete prefix
                if isinstance(value, dict):
                    # NOTE(review): rebinding ``filtered``/``simple`` here
                    # shadows the loop's source dict; safe only because the
                    # ``items()`` iterator was created before the rebinding.
                    filtered = self._filter_unecessary(value, child_path)
                    simple = self._handle_simple_dict(filtered, child_path)
                    buffer.write(
                        f" {simple}"
                        if simple
                        else f"\n{self(value, child_prefix, _path=child_path)}"
                    )
                elif isinstance(value, list) and (
                    key != "type" or self._is_property(child_path)
                ):
                    children = self._handle_list(value, item_prefix, child_path)
                    sep = " " if children.startswith("[") else "\n"
                    buffer.write(f"{sep}{children}")
                else:
                    buffer.write(f" {self._value(value, child_path)}\n")
            return buffer.getvalue()

    def _is_unecessary(self, path: Sequence[str]) -> bool:
        """Whether the key at ``path`` should be omitted from the summary."""
        if self._is_property(path) or not path:  # empty path => instruction @ root
            return False
        key = path[-1]
        # Skip "$"-prefixed/"_"-prefixed keys and purely informative ones.
        return any(key.startswith(k) for k in "$_") or key in self._IGNORE

    def _filter_unecessary(self, schema: dict, path: Sequence[str]):
        """Return ``schema`` without the keys flagged by ``_is_unecessary``."""
        return {
            key: value
            for key, value in schema.items()
            if not self._is_unecessary([*path, key])
        }

    def _handle_simple_dict(self, value: dict, path: Sequence[str]) -> Optional[str]:
        """Return a one-line ``{...}`` rendering for simple schemas, else None."""
        inline = any(p in value for p in self._guess_inline_defs)
        simple = not any(isinstance(v, (list, dict)) for v in value.values())
        if inline or simple:
            return f"{{{', '.join(self._inline_attrs(value, path))}}}\n"
        return None

    def _handle_list(
        self, schemas: list, prefix: str = "", path: Sequence[str] = ()
    ) -> str:
        """Render a list of sub-schemas; short scalar-only lists stay inline."""
        if self._is_unecessary(path):
            return ""

        repr_ = repr(schemas)
        if all(not isinstance(e, (dict, list)) for e in schemas) and len(repr_) < 60:
            return f"{repr_}\n"

        item_prefix = self._child_prefix(prefix, "- ")
        return "".join(
            self(v, item_prefix, _path=[*path, f"[{i}]"]) for i, v in enumerate(schemas)
        )

    def _is_property(self, path: Sequence[str]):
        """Check if the given path can correspond to an arbitrarily named property"""
        counter = 0
        # Count consecutive "properties"/"patternProperties" ancestors,
        # starting from the parent of the last path element.
        for key in path[-2::-1]:
            if key not in {"properties", "patternProperties"}:
                break
            counter += 1

        # If the counter is even, the path corresponds to a JSON Schema keyword
        # otherwise it can be any arbitrary string naming a property
        return counter % 2 == 1

    def _label(self, path: Sequence[str]) -> str:
        """Human-readable label for the key at ``path``."""
        *parents, key = path
        if not self._is_property(path):
            norm_key = _separate_terms(key)
            return self._terms.get(key) or " ".join(self._jargon(norm_key))

        if parents[-1] == "patternProperties":
            return f"(regex {key!r})"
        return repr(key)  # property name

    def _value(self, value: Any, path: Sequence[str]) -> str:
        """Render a scalar schema value; ``type`` values go through jargon."""
        if path[-1] == "type" and not self._is_property(path):
            type_ = self._jargon(value)
            return (
                f"[{', '.join(type_)}]" if isinstance(value, list) else cast(str, type_)
            )
        return repr(value)

    def _inline_attrs(self, schema: dict, path: Sequence[str]) -> Iterator[str]:
        """Yield ``label: value`` fragments for the one-line rendering."""
        for key, value in schema.items():
            child_path = [*path, key]
            yield f"{self._label(child_path)}: {self._value(value, child_path)}"

    def _child_prefix(self, parent_prefix: str, child_prefix: str) -> str:
        """Prefix for nested lines: parent's width as spaces plus a marker."""
        return len(parent_prefix) * " " + child_prefix
|  | ||||
|  | ||||
def _separate_terms(word: str) -> List[str]:
    """Split a camelCase/punctuated word into lowercase terms.

    >>> _separate_terms("FooBar-foo")
    ['foo', 'bar', 'foo']
    """
    pieces = _CAMEL_CASE_SPLITTER.split(word)
    return [piece.lower() for piece in pieces if piece]
| @ -0,0 +1,36 @@ | ||||
| """The purpose of this module is implement PEP 621 validations that are | ||||
| difficult to express as a JSON Schema (or that are not supported by the current | ||||
| JSON Schema library). | ||||
| """ | ||||
|  | ||||
| from typing import Mapping, TypeVar | ||||
|  | ||||
| from .error_reporting import ValidationError | ||||
|  | ||||
# Generic mapping type so validators return the same (sub)type they receive.
T = TypeVar("T", bound=Mapping)
|  | ||||
|  | ||||
class RedefiningStaticFieldAsDynamic(ValidationError):
    """According to PEP 621:

    Build back-ends MUST raise an error if the metadata specifies a field
    statically as well as being listed in dynamic.

    Raised by :func:`validate_project_dynamic`.
    """
|  | ||||
|  | ||||
def validate_project_dynamic(pyproject: T) -> T:
    """Ensure no ``project`` field is both statically defined and listed
    under ``project.dynamic`` (forbidden by PEP 621)."""
    project_table = pyproject.get("project", {})
    dynamic_fields = project_table.get("dynamic", [])

    for field in dynamic_fields:
        if field not in project_table:
            continue
        msg = (
            f"You cannot provide a value for `project.{field}` and "
            "list it under `project.dynamic` at the same time"
        )
        value = {field: project_table[field], "...": " # ...", "dynamic": dynamic_fields}
        raise RedefiningStaticFieldAsDynamic(
            msg, value, f"data.project.{field}", rule="PEP 621"
        )

    return pyproject


# Extra checks applied after the JSON Schema validation proper.
EXTRA_VALIDATIONS = (validate_project_dynamic,)
| @ -0,0 +1,51 @@ | ||||
| import re | ||||
|  | ||||
|  | ||||
# Splits validator "name" paths like ``data.project.name[0]`` into segments.
SPLIT_RE = re.compile(r'[\.\[\]]+')
|  | ||||
|  | ||||
class JsonSchemaException(ValueError):
    """
    Base exception of ``fastjsonschema`` library.

    Subclasses ``ValueError`` so callers may catch either type.
    """
|  | ||||
|  | ||||
class JsonSchemaValueException(JsonSchemaException):
    """
    Exception raised by validation function. Available properties:

     * ``message`` containing human-readable information what is wrong (e.g. ``data.property[index] must be smaller than or equal to 42``),
     * invalid ``value`` (e.g. ``60``),
     * ``name`` of a path in the data structure (e.g. ``data.property[index]``),
     * ``path`` as an array in the data structure (e.g. ``['data', 'property', 'index']``),
     * the whole ``definition`` which the ``value`` has to fulfil (e.g. ``{'type': 'number', 'maximum': 42}``),
     * ``rule`` which the ``value`` is breaking (e.g. ``maximum``)
     * and ``rule_definition`` (e.g. ``42``).

    .. versionchanged:: 2.14.0
        Added all extra properties.
    """

    def __init__(self, message, value=None, name=None, definition=None, rule=None):
        super().__init__(message)
        self.message = message
        self.value = value
        self.name = name
        self.definition = definition
        self.rule = rule

    @property
    def path(self):
        """``name`` split into its path segments (empty segments dropped)."""
        segments = SPLIT_RE.split(self.name)
        return [segment for segment in segments if segment != '']

    @property
    def rule_definition(self):
        """The sub-definition for the broken ``rule``, when both are known."""
        if self.rule and self.definition:
            return self.definition.get(self.rule)
        return None
|  | ||||
|  | ||||
class JsonSchemaDefinitionException(JsonSchemaException):
    """
    Exception raised by generator of validation function.

    Indicates a problem while building the validation function from a schema
    (as opposed to a problem with the validated data).
    """
										
											
												File diff suppressed because one or more lines are too long
											
										
									
								
							| @ -0,0 +1,259 @@ | ||||
| import logging | ||||
| import os | ||||
| import re | ||||
| import string | ||||
| import typing | ||||
| from itertools import chain as _chain | ||||
|  | ||||
| _logger = logging.getLogger(__name__) | ||||
|  | ||||
| # ------------------------------------------------------------------------------------- | ||||
| # PEP 440 | ||||
|  | ||||
# Verbose-mode regex fragment describing a PEP 440 version identifier.
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""

# Compiled once; VERBOSE allows the commented layout above, IGNORECASE
# accepts e.g. "1.0RC1".  Surrounding whitespace is tolerated.
VERSION_REGEX = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.X | re.I)


def pep440(version: str) -> bool:
    """Check whether ``version`` is a valid PEP 440 version identifier."""
    return bool(VERSION_REGEX.match(version))
|  | ||||
|  | ||||
| # ------------------------------------------------------------------------------------- | ||||
| # PEP 508 | ||||
|  | ||||
# PEP 508 project name: alphanumeric, possibly with ".", "_", "-" inside,
# but starting and ending with an alphanumeric character.
PEP508_IDENTIFIER_PATTERN = r"([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])"
PEP508_IDENTIFIER_REGEX = re.compile(f"^{PEP508_IDENTIFIER_PATTERN}$", re.I)


def pep508_identifier(name: str) -> bool:
    """Check whether ``name`` is a valid PEP 508 project identifier."""
    return bool(PEP508_IDENTIFIER_REGEX.match(name))
|  | ||||
|  | ||||
try:
    try:
        from packaging import requirements as _req
    except ImportError:  # pragma: no cover
        # let's try setuptools vendored version
        from setuptools._vendor.packaging import requirements as _req  # type: ignore

    def pep508(value: str) -> bool:
        """Check a full PEP 508 dependency specification via ``packaging``."""
        try:
            _req.Requirement(value)
            return True
        except _req.InvalidRequirement:
            return False

except ImportError:  # pragma: no cover
    # Neither ``packaging`` nor the setuptools vendored copy is available:
    # degrade gracefully (warn once) instead of failing at import time.
    _logger.warning(
        "Could not find an installation of `packaging`. Requirements, dependencies and "
        "versions might not be validated. "
        "To enforce validation, please install `packaging`."
    )

    def pep508(value: str) -> bool:
        """Fallback used when ``packaging`` is missing: accept any value."""
        return True
|  | ||||
|  | ||||
def pep508_versionspec(value: str) -> bool:
    """Expression that can be used to specify/lock versions (including ranges)"""
    # Markers (";"), extras ("]") and URL references ("@") are valid in a
    # full PEP 508 requirement but not inside a bare version specifier.
    if ";" in value or "]" in value or "@" in value:
        return False
    # Validate by pretending the spec belongs to a dependency called
    # ``requirement`` and reusing the full PEP 508 check.
    return pep508(f"requirement{value}")
|  | ||||
|  | ||||
| # ------------------------------------------------------------------------------------- | ||||
| # PEP 517 | ||||
|  | ||||
|  | ||||
def pep517_backend_reference(value: str) -> bool:
    """Check a PEP 517 ``build-backend`` reference (``module.path:obj.attr``)."""
    module, _, obj = value.partition(":")
    candidates = (part.strip() for part in _chain(module.split("."), obj.split(".")))
    return all(python_identifier(candidate) for candidate in candidates if candidate)
|  | ||||
|  | ||||
| # ------------------------------------------------------------------------------------- | ||||
| # Classifiers - PEP 301 | ||||
|  | ||||
|  | ||||
def _download_classifiers() -> str:
    """Download the list of valid trove classifiers from PyPI (network I/O)."""
    import ssl
    from email.message import Message
    from urllib.request import urlopen

    url = "https://pypi.org/pypi?:action=list_classifiers"
    context = ssl.create_default_context()
    with urlopen(url, context=context) as response:
        # Reuse email.Message to parse the charset parameter of the header.
        headers = Message()
        # NOTE(review): the header is stored under "content_type" but
        # ``get_param`` looks in "content-type" by default, so the charset
        # lookup presumably always falls back to "utf-8" — confirm upstream.
        headers["content_type"] = response.getheader("content-type", "text/plain")
        return response.read().decode(headers.get_param("charset", "utf-8"))
|  | ||||
|  | ||||
| class _TroveClassifier: | ||||
|     """The ``trove_classifiers`` package is the official way of validating classifiers, | ||||
|     however this package might not be always available. | ||||
|     As a workaround we can still download a list from PyPI. | ||||
|     We also don't want to be over strict about it, so simply skipping silently is an | ||||
|     option (classifiers will be validated anyway during the upload to PyPI). | ||||
|     """ | ||||
|  | ||||
|     def __init__(self): | ||||
|         self.downloaded: typing.Union[None, False, typing.Set[str]] = None | ||||
|         self._skip_download = False | ||||
|         # None => not cached yet | ||||
|         # False => cache not available | ||||
|         self.__name__ = "trove_classifier"  # Emulate a public function | ||||
|  | ||||
|     def _disable_download(self): | ||||
|         # This is a private API. Only setuptools has the consent of using it. | ||||
|         self._skip_download = True | ||||
|  | ||||
|     def __call__(self, value: str) -> bool: | ||||
|         if self.downloaded is False or self._skip_download is True: | ||||
|             return True | ||||
|  | ||||
|         if os.getenv("NO_NETWORK") or os.getenv("VALIDATE_PYPROJECT_NO_NETWORK"): | ||||
|             self.downloaded = False | ||||
|             msg = ( | ||||
|                 "Install ``trove-classifiers`` to ensure proper validation. " | ||||
|                 "Skipping download of classifiers list from PyPI (NO_NETWORK)." | ||||
|             ) | ||||
|             _logger.debug(msg) | ||||
|             return True | ||||
|  | ||||
|         if self.downloaded is None: | ||||
|             msg = ( | ||||
|                 "Install ``trove-classifiers`` to ensure proper validation. " | ||||
|                 "Meanwhile a list of classifiers will be downloaded from PyPI." | ||||
|             ) | ||||
|             _logger.debug(msg) | ||||
|             try: | ||||
|                 self.downloaded = set(_download_classifiers().splitlines()) | ||||
|             except Exception: | ||||
|                 self.downloaded = False | ||||
|                 _logger.debug("Problem with download, skipping validation") | ||||
|                 return True | ||||
|  | ||||
|         return value in self.downloaded or value.lower().startswith("private ::") | ||||
|  | ||||
|  | ||||
try:
    from trove_classifiers import classifiers as _trove_classifiers

    def trove_classifier(value: str) -> bool:
        """Check a trove classifier against the official package's list."""
        return value in _trove_classifiers or value.lower().startswith("private ::")

except ImportError:  # pragma: no cover
    # Fall back to the download/skip behavior implemented above.
    trove_classifier = _TroveClassifier()
|  | ||||
|  | ||||
| # ------------------------------------------------------------------------------------- | ||||
| # Non-PEP related | ||||
|  | ||||
|  | ||||
def url(value: str) -> bool:
    """Best-effort URL check: passes when the value has both a scheme and a
    network location (possibly after assuming an ``http://`` prefix)."""
    from urllib.parse import urlparse

    try:
        parts = urlparse(value)
        if not parts.scheme:
            _logger.warning(
                "For maximum compatibility please make sure to include a "
                "`scheme` prefix in your URL (e.g. 'http://'). "
                f"Given value: {value}"
            )
            # Retry with an assumed scheme, unless the value looks like a
            # filesystem path or contains an "@" (e.g. credentials notation).
            looks_like_path = value.startswith(("/", "\\"))
            if not looks_like_path and "@" not in value:
                parts = urlparse(f"http://{value}")

        return bool(parts.scheme) and bool(parts.netloc)
    except Exception:
        return False
|  | ||||
|  | ||||
| # https://packaging.python.org/specifications/entry-points/ | ||||
# Accepted entry-point names: no "=", no surrounding whitespace, and no
# leading "[" (which would start the extras section).
ENTRYPOINT_PATTERN = r"[^\[\s=]([^=]*[^\s=])?"
ENTRYPOINT_REGEX = re.compile(f"^{ENTRYPOINT_PATTERN}$", re.I)
# The spec merely *recommends* this stricter pattern (word chars, ".", "-").
RECOMMEDED_ENTRYPOINT_PATTERN = r"[\w.-]+"
RECOMMEDED_ENTRYPOINT_REGEX = re.compile(f"^{RECOMMEDED_ENTRYPOINT_PATTERN}$", re.I)
# Entry-point groups: dotted runs of word characters (e.g. "console_scripts").
ENTRYPOINT_GROUP_PATTERN = r"\w+(\.\w+)*"
ENTRYPOINT_GROUP_REGEX = re.compile(f"^{ENTRYPOINT_GROUP_PATTERN}$", re.I)
|  | ||||
|  | ||||
def python_identifier(value: str) -> bool:
    """Check whether ``value`` is a valid Python identifier."""
    return value.isidentifier()
|  | ||||
|  | ||||
def python_qualified_identifier(value: str) -> bool:
    """Check a dotted chain of identifiers (no leading/trailing dot)."""
    has_boundary_dot = value.startswith(".") or value.endswith(".")
    return not has_boundary_dot and all(
        python_identifier(part) for part in value.split(".")
    )
|  | ||||
|  | ||||
def python_module_name(value: str) -> bool:
    """Check a dotted module name (same rules as a qualified identifier)."""
    return python_qualified_identifier(value)
|  | ||||
|  | ||||
def python_entrypoint_group(value: str) -> bool:
    """Check an entry-point group name (dotted runs of word characters)."""
    return ENTRYPOINT_GROUP_REGEX.match(value) is not None
|  | ||||
|  | ||||
def python_entrypoint_name(value: str) -> bool:
    """Check an entry-point name; names outside the *recommended* pattern are
    accepted but trigger a warning."""
    if not ENTRYPOINT_REGEX.match(value):
        return False
    if not RECOMMEDED_ENTRYPOINT_REGEX.match(value):
        msg = f"Entry point `{value}` does not follow recommended pattern: "
        msg += RECOMMEDED_ENTRYPOINT_PATTERN
        _logger.warning(msg)
    return True
|  | ||||
|  | ||||
def python_entrypoint_reference(value: str) -> bool:
    """Check an entry-point object reference
    (``module.path:object.attr [extra1, extra2]``)."""
    module, _, rest = value.partition(":")
    if "[" in rest:
        obj, _, extras_ = rest.partition("[")
        if not extras_.strip().endswith("]"):
            # Covers both an unterminated "[" and an empty extras section;
            # the previous ``extras_.strip()[-1] != "]"`` check raised
            # IndexError on values ending with a bare "[".
            return False
        extras = (x.strip() for x in extras_.strip(string.whitespace + "[]").split(","))
        if not all(pep508_identifier(e) for e in extras):
            return False
        _logger.warning(f"`{value}` - using extras for entry points is not recommended")
    else:
        obj = rest

    module_parts = module.split(".")
    identifiers = _chain(module_parts, obj.split(".")) if rest else module_parts
    return all(python_identifier(i.strip()) for i in identifiers)
		Reference in New Issue
	
	Block a user