validator
This module contains the ImplementationValidator class, which can be pointed at an OPTIMADE implementation to validate it against the specification via the pydantic models implemented in this package.
ImplementationValidator
Class used to make a series of checks against a particular OPTIMADE implementation over HTTP.
Uses the pydantic models in `optimade.models` to validate the response from the server and crawl through the available endpoints.
Attributes:
Name | Type | Description |
---|---|---|
valid | Optional[bool] | Whether or not the implementation was deemed valid; `None` until validation has been run. |
Caution
Only works for the current version of the specification as defined by `optimade.models`.
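A minimal usage sketch is shown below; the base URL is a placeholder and should point to a real OPTIMADE implementation:

```python
from optimade.validator import ImplementationValidator

# Placeholder URL: point this at the base of the implementation to validate.
validator = ImplementationValidator(base_url="https://example.org/optimade/v1")

# Runs the full test suite, prints a summary and sets `validator.valid`.
validator.validate_implementation()

if validator.valid:
    print("Implementation passed all mandatory tests.")
```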
__init__(self, client=None, base_url=None, verbosity=0, respond_json=False, page_limit=5, max_retries=5, run_optional_tests=True, fail_fast=False, as_type=None, index=False, minimal=False)
Set up the tests to run, based on constants in this module for required endpoints.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
client | Any | A client that has a `.get()` method to obtain the response from the implementation. If `None`, then `Client` will be used. | None |
base_url | str | The URL of the implementation to validate. Unless performing "as_type" validation, this should point to the base of the OPTIMADE implementation. | None |
verbosity | int | The verbosity of the output and logging as an integer (`0`: critical, `1`: warning, `2`: info, `3`: debug). | 0 |
respond_json | bool | If `True`, print only a JSON representation of the results of validation to stdout. | False |
page_limit | int | The default page limit to apply to filters. | 5 |
max_retries | int | Passed to the client; the number of attempts to make for a request before failing. | 5 |
run_optional_tests | bool | Whether to run the tests on optional OPTIMADE features. | True |
fail_fast | bool | Whether to exit validation after the first failure of a mandatory test. | False |
as_type | str | An OPTIMADE entry or endpoint type to coerce the response from the implementation into, e.g. "structures". Requires `base_url` to point to the corresponding endpoint. | None |
index | bool | Whether to validate the implementation as an index meta-database. | False |
minimal | bool | Whether or not to run only a minimal test set. | False |
Source code in optimade/validator/validator.py
```python
def __init__(  # pylint: disable=too-many-arguments
    self,
    client: Any = None,
    base_url: str = None,
    verbosity: int = 0,
    respond_json: bool = False,
    page_limit: int = 5,
    max_retries: int = 5,
    run_optional_tests: bool = True,
    fail_fast: bool = False,
    as_type: str = None,
    index: bool = False,
    minimal: bool = False,
):
    """Set up the tests to run, based on constants in this module
    for required endpoints.

    Arguments:
        client: A client that has a `.get()` method to obtain the
            response from the implementation. If `None`, then
            [`Client`][optimade.validator.utils.Client] will be used.
        base_url: The URL of the implementation to validate. Unless
            performing "as_type" validation, this should point to the
            base of the OPTIMADE implementation.
        verbosity: The verbosity of the output and logging as an integer
            (`0`: critical, `1`: warning, `2`: info, `3`: debug).
        respond_json: If `True`, print only a JSON representation of the
            results of validation to stdout.
        page_limit: The default page limit to apply to filters.
        max_retries: Argument is passed to the client for how many
            attempts to make for a request before failing.
        run_optional_tests: Whether to run the tests on optional
            OPTIMADE features.
        fail_fast: Whether to exit validation after the first failure
            of a mandatory test.
        as_type: An OPTIMADE entry or endpoint type to coerce the response
            from implementation into, e.g. "structures". Requires `base_url`
            to be pointed to the corresponding endpoint.
        index: Whether to validate the implementation as an index meta-database.
        minimal: Whether or not to run only a minimal test set.

    """
    self.verbosity = verbosity
    self.max_retries = max_retries
    self.page_limit = page_limit
    self.index = index
    self.run_optional_tests = run_optional_tests
    self.fail_fast = fail_fast
    self.respond_json = respond_json
    self.minimal = minimal

    if as_type is None:
        self.as_type_cls = None
    elif self.index:
        if as_type not in CONF.response_classes_index:
            raise RuntimeError(
                f"Provided as_type='{as_type}' not allowed for an Index meta-database."
            )
        self.as_type_cls = CONF.response_classes_index[as_type]
    elif as_type in ("structure", "reference"):
        self.as_type_cls = CONF.response_classes[f"{as_type}s/"]
    else:
        self.as_type_cls = CONF.response_classes[as_type]

    if client is None and base_url is None:
        raise RuntimeError(
            "Need at least a URL or a client to initialize validator."
        )
    if base_url and client:
        raise RuntimeError("Please specify at most one of base_url or client.")

    if client:
        self.client = client
        self.base_url = self.client.base_url
    else:
        while base_url.endswith("/"):
            base_url = base_url[:-1]
        self.base_url = base_url
        self.client = Client(base_url, max_retries=self.max_retries)

    self._setup_log()
    self._response_classes = (
        CONF.response_classes_index if self.index else CONF.response_classes
    )

    # some simple checks on base_url
    self.base_url_parsed = urllib.parse.urlparse(self.base_url)
    # only allow filters/endpoints if we are working in "as_type" mode
    if self.as_type_cls is None and self.base_url_parsed.query:
        raise SystemExit("Base URL not appropriate: should not contain a filter.")

    self.valid = None

    self._test_id_by_type = {}
    self._entry_info_by_type = {}

    self.results = ValidatorResults(verbosity=self.verbosity)
```
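As a sketch of the "as_type" mode described above, the snippet below coerces the response from a single endpoint into the structures response model instead of crawling the whole implementation; the URL is illustrative only:

```python
from optimade.validator import ImplementationValidator

# "as_type" validation of a single response; the URL is a placeholder and must
# point at the endpoint that corresponds to the chosen type.
validator = ImplementationValidator(
    base_url="https://example.org/optimade/v1/structures",
    as_type="structures",
)
validator.validate_implementation()
```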
print_summary(self)
Print a summary of the results of validation.
Source code in optimade/validator/validator.py
```python
def print_summary(self):
    """ Print a summary of the results of validation. """
    if self.respond_json:
        print(json.dumps(dataclasses.asdict(self.results), indent=2))
        return

    if self.results.failure_messages:
        print("\n\nFAILURES")
        print("========\n")
        for message in self.results.failure_messages:
            print_failure(message[0])
            for line in message[1].split("\n"):
                print_warning("\t" + line)

    if self.results.optional_failure_messages:
        print("\n\nOPTIONAL TEST FAILURES")
        print("======================\n")
        for message in self.results.optional_failure_messages:
            print_notify(message[0])
            for line in message[1].split("\n"):
                print_warning("\t" + line)

    if self.results.internal_failure_messages:
        print("\n\nINTERNAL FAILURES")
        print("=================\n")
        print(
            "There were internal validator failures associated with this run.\n"
            "If this problem persists, please report it at:\n"
            "https://github.com/Materials-Consortia/optimade-python-tools/issues/new\n"
        )
        for message in self.results.internal_failure_messages:
            print_warning(message[0])
            for line in message[1].split("\n"):
                print_warning("\t" + line)

    if self.valid or (not self.valid and not self.fail_fast):
        final_message = f"\n\nPassed {self.results.success_count} out of {self.results.success_count + self.results.failure_count + self.results.internal_failure_count} tests."
        if not self.valid:
            print_failure(final_message)
        else:
            print_success(final_message)

        if self.run_optional_tests and not self.fail_fast:
            print(
                f"Additionally passed {self.results.optional_success_count} out of "
                f"{self.results.optional_success_count + self.results.optional_failure_count} optional tests."
            )
```
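For machine-readable output, `respond_json=True` makes the summary a JSON dump of the results rather than the plain-text report. A small sketch, with a placeholder URL:

```python
from optimade.validator import ImplementationValidator

# With respond_json=True the summary is printed as a JSON document on stdout,
# which is easier to post-process than the plain-text report.
validator = ImplementationValidator(
    base_url="https://example.org/optimade/v1",
    respond_json=True,
)
validator.validate_implementation()  # calls print_summary() internally
```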
validate_implementation(self)
Run all the test cases on the implementation, or the single type test, depending on what options were provided on initialisation.
Sets the `self.valid` attribute to `True` or `False` depending on the outcome of the tests.
Exceptions:
Type | Description |
---|---|
RuntimeError | If it was not possible to start the validation process. |
Source code in optimade/validator/validator.py
```python
def validate_implementation(self):
    """Run all the test cases on the implementation, or the single type test,
    depending on what options were provided on initialisation.

    Sets the `self.valid` attribute to `True` or `False` depending on the
    outcome of the tests.

    Raises:
        RuntimeError: If it was not possible to start the validation process.

    """
    # If a single "as type" has been set, only run that test
    if self.as_type_cls is not None:
        self._log.debug(
            "Validating response of %s with model %s",
            self.base_url,
            self.as_type_cls,
        )
        self._test_as_type()
        self.valid = not bool(self.results.failure_count)
        self.print_summary()
        return

    # Test entire implementation
    if self.verbosity >= 0:
        print(f"Testing entire implementation at {self.base_url}")

    info_endp = CONF.info_endpoint
    self._log.debug("Testing base info endpoint of %s", info_endp)

    # Get and validate base info to find endpoints
    # If this is not possible, then exit at this stage
    base_info = self._test_info_or_links_endpoint(info_endp)
    if not base_info:
        self._log.critical(
            f"Unable to deserialize response from introspective {info_endp!r} endpoint. "
            "This is required for all further validation, so the validator will now exit."
        )
        # Set valid to False to ensure error code 1 is raised at CLI
        self.valid = False
        self.print_summary()
        return

    # Grab the provider prefix from base info and use it when looking for provider fields
    self.provider_prefix = None
    meta = base_info.get("meta", {})
    if meta.get("provider") is not None:
        self.provider_prefix = meta["provider"].get("prefix")

    # Set the response class for all `/info/entry` endpoints based on `/info` response
    self.available_json_endpoints, _ = self._get_available_endpoints(
        base_info, request=info_endp
    )
    for endp in self.available_json_endpoints:
        self._response_classes[f"{info_endp}/{endp}"] = EntryInfoResponse

    # Run some tests on the versions endpoint
    self._log.debug("Testing versions endpoint %s", CONF.versions_endpoint)
    self._test_versions_endpoint()
    self._test_bad_version_returns_553()

    # Test that entry info endpoints deserialize correctly
    # If they do not, the corresponding entry in _entry_info_by_type
    # is set to False, which must be checked for further validation
    for endp in self.available_json_endpoints:
        entry_info_endpoint = f"{info_endp}/{endp}"
        self._log.debug("Testing expected info endpoint %s", entry_info_endpoint)
        self._entry_info_by_type[endp] = self._test_info_or_links_endpoint(
            entry_info_endpoint
        )

    # Use the _entry_info_by_type to construct filters on the relevant endpoints
    if not self.minimal:
        for endp in self.available_json_endpoints:
            self._log.debug("Testing queries on JSON entry endpoint of %s", endp)
            self._recurse_through_endpoint(endp)

    # Test that the results from multi-entry-endpoints obey, e.g. page limits,
    # and that all entries can be deserialized with the patched models.
    # These methods also set the test_ids for each type of entry, which are validated
    # in the next loop.
    for endp in self.available_json_endpoints:
        self._log.debug("Testing multiple entry endpoint of %s", endp)
        self._test_multi_entry_endpoint(endp)

    # Test that the single IDs scraped earlier work with the single entry endpoint
    for endp in self.available_json_endpoints:
        self._log.debug("Testing single entry request of type %s", endp)
        self._test_single_entry_endpoint(endp)

    # Test that the links endpoint can be serialized correctly
    self._log.debug("Testing %s endpoint", CONF.links_endpoint)
    self._test_info_or_links_endpoint(CONF.links_endpoint)

    self.valid = not (
        self.results.failure_count or self.results.internal_failure_count
    )

    self.print_summary()
```
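Since test failures set `self.valid` rather than raising (only a failure to start validation raises, as documented above), a wrapper script can map the outcome to a process exit code, e.g. for CI. The URL below is a placeholder; `index=True` validates an index meta-database:

```python
import sys

from optimade.validator import ImplementationValidator

# Validate an index meta-database (placeholder URL) and map the result to a
# process exit code so that CI jobs fail when validation fails.
validator = ImplementationValidator(
    base_url="https://example.org/optimade-index/v1",
    index=True,
)
validator.validate_implementation()
sys.exit(0 if validator.valid else 1)
```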