2023-07-19 16:32:32 +02:00
|
|
|
from test_configuration import COMPONENT_TYPES, IMAGE_FILETYPES, SCHEMAS, KNOWN_SLUGS, ROOT_DIR, USE_LOCAL_KNOWN_SLUGS, NETBOX_DT_LIBRARY_URL
|
|
|
|
import pickle_operations
|
2023-07-13 02:00:44 +02:00
|
|
|
from yaml_loader import DecimalSafeLoader
|
2023-07-17 21:23:56 +02:00
|
|
|
from device_types import DeviceType, ModuleType, verify_filename, validate_components
|
2023-01-31 15:42:16 +01:00
|
|
|
import decimal
|
2019-12-19 16:19:41 +01:00
|
|
|
import glob
|
|
|
|
import json
|
2022-02-11 22:22:59 +01:00
|
|
|
import os
|
2023-07-19 16:32:32 +02:00
|
|
|
import tempfile
|
2023-01-31 15:42:16 +01:00
|
|
|
from urllib.request import urlopen
|
|
|
|
|
2019-12-19 16:19:41 +01:00
|
|
|
import pytest
|
|
|
|
import yaml
|
2023-01-31 15:42:16 +01:00
|
|
|
from jsonschema import Draft4Validator, RefResolver
|
2019-12-19 17:08:51 +01:00
|
|
|
from jsonschema.exceptions import ValidationError
|
2023-07-19 16:32:32 +02:00
|
|
|
from git import Repo
|
2022-02-11 22:22:59 +01:00
|
|
|
|
2019-12-19 16:19:41 +01:00
|
|
|
def _get_definition_files():
    """
    Return a list of all definition files, each paired with its parsed schema.

    Returns:
        list[tuple[str, dict]]: (definition file path, parsed JSON schema) tuples.
    """
    pairs = []

    for path, schema_name in SCHEMAS:

        # Parse the JSON schema, keeping floats as Decimal to avoid precision loss
        with open(f"schema/{schema_name}") as schema_file:
            loaded_schema = json.load(schema_file, parse_float=decimal.Decimal)

        # An empty schema would make every validation pass vacuously
        assert loaded_schema, f"Schema definition for {path} is empty!"

        # Pair every definition file beneath this path with its schema
        matched_files = sorted(glob.glob(f"{path}/*/*", recursive=True))
        pairs.extend((definition_file, loaded_schema) for definition_file in matched_files)

    return pairs
|
2022-02-11 22:22:59 +01:00
|
|
|
|
2023-07-19 16:32:32 +02:00
|
|
|
def _get_diff_from_upstream():
    """
    Return a list of (definition file, schema) tuples for definition files that
    differ between the upstream master branch and the local HEAD.

    Ensures an "upstream" remote pointing at NETBOX_DT_LIBRARY_URL exists,
    fetches it, and diffs its master against the local HEAD.

    Returns:
        list[tuple[str, dict]]: (changed file path, parsed JSON schema) tuples.
    """
    file_list = []

    # Open the repository rooted one directory above this test file
    repo = Repo(f"{os.path.dirname(os.path.abspath(__file__))}/../")

    if "upstream" not in repo.remotes:
        repo.create_remote("upstream", NETBOX_DT_LIBRARY_URL)

    upstream = repo.remotes.upstream
    upstream.fetch()
    changes = upstream.refs.master.commit.diff(repo.head)

    # Ensure files are either added, renamed, modified or type changed (do not
    # get deleted files). Loop-invariant, so defined once outside the loop.
    CHANGE_TYPE_LIST = ['A', 'R', 'M', 'T']

    for path, schema in SCHEMAS:

        # Initialize the schema, keeping floats as Decimal to avoid precision loss
        with open(f"schema/{schema}") as schema_file:
            schema = json.loads(schema_file.read(), parse_float=decimal.Decimal)

        # Validate that the schema exists
        assert schema, f"Schema definition for {path} is empty!"

        # Iterate through changed files
        for file in changes:

            # Ensure the files are modified or added; this excludes deleted files
            if file.change_type in CHANGE_TYPE_LIST:

                # If the file is renamed, ensure we are picking the right schema
                if 'R' in file.change_type and path in file.rename_to:
                    file_list.append((file.rename_to, schema))
                elif path in file.a_path:
                    file_list.append((file.a_path, schema))
                elif path in file.b_path:
                    file_list.append((file.b_path, schema))

    return file_list
|
|
|
|
|
2023-03-24 21:50:47 +01:00
|
|
|
def _get_image_files():
    """
    Return a list of all image files within the specified path and manufacturer.

    Returns:
        list[tuple[str, str]]: (manufacturer, image file path) tuples.

    Raises:
        AssertionError: If an image file's extension is not in IMAGE_FILETYPES.
    """
    file_list = []

    # Map each image file to its manufacturer
    for file in sorted(glob.glob(f"elevation-images{os.path.sep}*{os.path.sep}*", recursive=True)):

        # Split the path once: elevation-images/<manufacturer>/<filename>
        parts = file.split(os.path.sep)

        # Validate that the file extension is valid
        assert parts[2].split('.')[-1] in IMAGE_FILETYPES, f"Invalid file extension: {file}"

        # Map each image file to its manufacturer as a tuple (manufacturer, file)
        file_list.append((parts[1], file))

    return file_list
|
2019-12-19 16:19:41 +01:00
|
|
|
|
2023-01-31 15:42:16 +01:00
|
|
|
def _decimal_file_handler(uri):
    """
    Resolve a schema URI, parsing floats as Decimal so they survive validation.

    Used as a jsonschema RefResolver handler for "file" URIs.
    """
    with urlopen(uri) as response:
        payload = response.read().decode("utf-8")
    return json.loads(payload, parse_float=decimal.Decimal)
|
|
|
|
|
2019-12-19 17:14:27 +01:00
|
|
|
def test_environment():
    """
    Run basic sanity checks on the environment to ensure tests are running correctly.
    """
    # Skip the run when the upstream diff contains no changed definition files.
    # (Fix: the condition was inverted — it previously skipped exactly when
    # changes DID exist, contradicting the skip message.)
    if not definition_files:
        pytest.skip("No changes to definition files found.")
|
2019-12-19 17:14:27 +01:00
|
|
|
|
2023-07-19 16:32:32 +02:00
|
|
|
# Collected once at import time and consumed by the parametrized tests below.
definition_files = _get_diff_from_upstream()
image_files = _get_image_files()

# Load the set of already-known device slugs: either from the local pickle, or
# from a fresh clone of the upstream library in a temporary directory.
# NOTE(review): this runs at import time, so collecting tests may trigger a
# network clone when USE_LOCAL_KNOWN_SLUGS is false.
if USE_LOCAL_KNOWN_SLUGS:
    KNOWN_SLUGS = pickle_operations.read_pickle_data(f'{ROOT_DIR}/tests/known-slugs.pickle')
else:
    temp_dir = tempfile.TemporaryDirectory()
    repo = Repo.clone_from(url=NETBOX_DT_LIBRARY_URL, to_path=temp_dir.name)
    KNOWN_SLUGS = pickle_operations.read_pickle_data(f'{temp_dir.name}/tests/known-slugs.pickle')
|
|
|
|
|
2022-02-11 22:22:59 +01:00
|
|
|
@pytest.mark.parametrize(('file_path', 'schema'), definition_files)
def test_definitions(file_path, schema):
    """
    Validate each definition file using the provided JSON schema and check for duplicate entries.

    Runs one test per (file_path, schema) pair produced by _get_diff_from_upstream():
    schema validation, slug/filename checks, duplicate-component checks, power
    validation, elevation-image checks, and an empty-string scan of the YAML data.
    """
    # Check file extension. Only .yml or .yaml files are supported.
    assert file_path.split('.')[-1] in ('yaml', 'yml'), f"Invalid file extension: {file_path}"

    # Read the definition file's raw contents
    with open(file_path) as definition_file:
        content = definition_file.read()

    # Check for trailing newline. YAML files must end with an empty newline.
    assert content.endswith('\n'), "Missing trailing newline"

    # Load YAML data from file, keeping floats as Decimal (DecimalSafeLoader)
    definition = yaml.load(content, Loader=DecimalSafeLoader)

    # Validate YAML definition against the supplied schema
    try:
        # Resolve schema $refs relative to the local schema directory, routing
        # file: URIs through the Decimal-preserving handler
        resolver = RefResolver(
            f"file://{os.getcwd()}/schema/devicetype.json",
            schema,
            handlers={"file": _decimal_file_handler},
        )
        # Validate definition against schema
        Draft4Validator(schema, resolver=resolver).validate(definition)
    except ValidationError as e:
        # Schema validation failure. Ensure you are following the proper format.
        pytest.fail(f"{file_path} failed validation: {e}", False)

    # Identify if the definition is for a Device or Module based on its path
    if "device-types" in file_path:
        # A device
        this_device = DeviceType(definition, file_path)
    else:
        # A module
        this_device = ModuleType(definition, file_path)

    # Verify the slug is valid, only if the definition type is a Device.
    # NOTE(review): pytest.fail() in the assert message raises as soon as the
    # condition is falsy, so the assert itself never reports — unusual idiom.
    if this_device.isDevice:
        assert this_device.verify_slug(KNOWN_SLUGS), pytest.fail(this_device.failureMessage, False)

    # Verify the filename is valid. Must either be the model or part_number.
    assert verify_filename(this_device), pytest.fail(this_device.failureMessage, False)

    # Check for duplicate components within the definition
    assert validate_components(COMPONENT_TYPES, this_device), pytest.fail(this_device.failureMessage, False)

    # Check for empty quotes (empty string values) and fail if found.
    # NOTE(review): the second `if` below looks like it should be `elif` —
    # dict values also fall through to the `else` branch. Harmless today
    # (a dict is never an empty str) but worth confirming the intent.
    def iterdict(var):
        for dict_value in var.values():
            if isinstance(dict_value, dict):
                iterdict(dict_value)
            if isinstance(dict_value, list):
                iterlist(dict_value)
            else:
                if(isinstance(dict_value, str) and not dict_value):
                    pytest.fail(f'{file_path} has empty quotes', False)

    # Companion walker for lists: recurse into nested dicts/lists
    def iterlist(var):
        for list_value in var:
            if isinstance(list_value, dict):
                iterdict(list_value)
            elif isinstance(list_value, list):
                iterlist(list_value)

    # Check for valid power definitions
    if this_device.isDevice:
        assert this_device.validate_power(), pytest.fail(this_device.failureMessage, False)

    # Check for images if front_image or rear_image is True
    if (definition.get('front_image') or definition.get('rear_image')):
        # Find images for given manufacturer, with matching device slug (exact match including case)
        manufacturer_images = [image[1] for image in image_files if image[0] == file_path.split('/')[1] and os.path.basename(image[1]).split('.')[0] == this_device.get_slug()]

        if not manufacturer_images:
            pytest.fail(f'{file_path} has Front or Rear Image set to True but no images found for manufacturer/device (slug={this_device.get_slug()})', False)
        elif len(manufacturer_images)>2:
            # At most one front and one rear image are expected per device
            pytest.fail(f'More than 2 images found for device with slug {this_device.get_slug()}: {manufacturer_images}', False)

        # If front_image is True, verify that a front image exists
        if(definition.get('front_image')):
            front_image = [image_path.split('/')[2] for image_path in manufacturer_images if os.path.basename(image_path).split('.')[1] == 'front']

            if not front_image:
                pytest.fail(f'{file_path} has front_image set to True but no matching image found for device ({manufacturer_images})', False)

        # If rear_image is True, verify that a rear image exists
        if(definition.get('rear_image')):
            rear_image = [image_path.split('/')[2] for image_path in manufacturer_images if os.path.basename(image_path).split('.')[1] == 'rear']

            if not rear_image:
                pytest.fail(f'{file_path} has rear_image set to True but no images found for device', False)

    # Run the empty-string scan over the whole parsed definition
    iterdict(definition)
|