import decimal
import glob
import json
import os
from urllib.request import urlopen

import pytest
import yaml
from jsonschema import Draft4Validator, RefResolver
from jsonschema.exceptions import ValidationError
from yaml_loader import DecimalSafeLoader

SCHEMAS = (
    ('device-types', 'devicetype.json'),
    ('module-types', 'moduletype.json'),
)

COMPONENT_TYPES = (
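    # Component collections checked for duplicate names within each definition.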
    'console-ports',
    'console-server-ports',
    'power-ports',
    'power-outlets',
    'interfaces',
    'front-ports',
    'rear-ports',
    'device-bays',
    'module-bays',
)


def _get_definition_files():
    """
    Return a list of (definition file, schema) pairs for all definition files.
    """
    ret = []

    for path, schema in SCHEMAS:

        # Initialize the schema
        with open(f"schema/{schema}") as schema_file:
            schema = json.loads(schema_file.read(), parse_float=decimal.Decimal)

        # Validate that the schema is not empty
        assert schema, f"Schema definition for {path} is empty!"

        # Map each definition file to its schema
        for f in sorted(glob.glob(f"{path}/*/*", recursive=True)):
            ret.append((f, schema))

    return ret
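

# definition_files parametrizes test_definitions() below; known_slugs persists across
# those parametrized runs so duplicate device type slugs are caught across files.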
definition_files = _get_definition_files()
known_slugs = set()


def _decimal_file_handler(uri):
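    """
    RefResolver "file" handler: fetch a referenced JSON document and parse its floats
    as decimal.Decimal, matching how the schemas themselves are loaded.
    """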
    with urlopen(uri) as url:
        result = json.loads(url.read().decode("utf-8"), parse_float=decimal.Decimal)
    return result


def test_environment():
    """
    Run basic sanity checks on the environment to ensure tests are running correctly.
    """
    # Validate that definition files exist
    assert definition_files, "No definition files found!"


@pytest.mark.parametrize(('file_path', 'schema'), definition_files)
def test_definitions(file_path, schema):
    """
    Validate each definition file using the provided JSON schema and check for duplicate entries.
    """
    # Check file extension
    assert file_path.split('.')[-1] in ('yaml', 'yml'), f"Invalid file extension: {file_path}"

    # Read file
    with open(file_path) as definition_file:
        content = definition_file.read()

    # Check for trailing newline
    assert content.endswith('\n'), "Missing trailing newline"

    # Load YAML data from file
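    # DecimalSafeLoader comes from the repo's yaml_loader module and is expected to
    # parse YAML floats as Decimal, consistent with the Decimal-parsed schemas above.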
    definition = yaml.load(content, Loader=DecimalSafeLoader)

    # Validate YAML definition against the supplied schema
    try:
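        # The resolver anchors relative $ref lookups to the schema/ directory and routes
        # file:// fetches through the Decimal-aware handler defined above.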
        resolver = RefResolver(
            f"file://{os.getcwd()}/schema/devicetype.json",
            schema,
            handlers={"file": _decimal_file_handler},
        )
        Draft4Validator(schema, resolver=resolver).validate(definition)
    except ValidationError as e:
        pytest.fail(f"{file_path} failed validation: {e}", False)

    # Check for duplicate slug
    if file_path.startswith('device-types/'):
        slug = definition.get('slug')
        if slug and slug in known_slugs:
            pytest.fail(f'{file_path} device type has duplicate slug "{slug}"', False)
        elif slug:
            known_slugs.add(slug)

    # Check for duplicate components
    for component_type in COMPONENT_TYPES:
        known_names = set()
        defined_components = definition.get(component_type, [])
        for component in defined_components:
            name = component.get('name')
            if name in known_names:
                pytest.fail(f'Duplicate entry "{name}" in {component_type} list', False)
            known_names.add(name)

    # Check for empty quotes
    def iterdict(var):
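        # Recursively walk dict values and fail on any empty string value.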
        for dict_value in var.values():
            if isinstance(dict_value, dict):
                iterdict(dict_value)
            elif isinstance(dict_value, list):
                iterlist(dict_value)
            elif isinstance(dict_value, str) and not dict_value:
                pytest.fail(f'{file_path} has empty quotes', False)

    def iterlist(var):
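        # List counterpart of iterdict(): recurse into nested dicts and lists.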
        for list_value in var:
            if isinstance(list_value, dict):
                iterdict(list_value)
            elif isinstance(list_value, list):
                iterlist(list_value)

    iterdict(definition)