Restore master (#1473)

* Pytest Optimizations (#1465)

* Adding function to determine file changes from upstream via git

* updated test env function to properly skip when files are empty

* updating to use generated slug list

* fixing if statement

* added known slugs functionality to improve runtime

* updating slugs for using git repo

* adding final changes

* updating library url to be correct

* Update master-slugs.yml

* commit to force pr merge (#1466)

* Pytest pr (#1467)

* commit to force pr merge

* commit to force pr merge

* Update master-slugs.yml

* Regenerate master slug list after successful PR merge

* Update master-slugs.yml

* final changes to new master slug workflow (#1468)

* Final pr test (#1469)

* final changes to new master slug workflow

* trying new branch push

* changing to myself (#1470)

* testing permission on github_token

* testing permission on github_token

* testing permission on github_token

---------

Co-authored-by: NetBox Bot <info@netboxlabs.com>
This commit is contained in:
Daniel W. Anner 2023-07-19 10:32:32 -04:00 committed by GitHub
parent e42a14b180
commit 15a3e7a9d1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 196 additions and 9 deletions

32
.github/workflows/master-slugs.yml vendored Normal file
View File

@ -0,0 +1,32 @@
---
# Regenerate the master slug list whenever a commit lands on master
# (i.e. after every PR merge), and push the result back to the branch.
name: Create Master Slug List on PR Merge
on:
  push:
    branches:
      - master
jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      contents: write        # required to push the regenerated slug list
      pull-requests: write
    steps:
      - uses: actions/checkout@v3
      - name: Setup Python
        uses: actions/setup-python@v3
        with:
          python-version: 3.8
      - name: Install dependencies
        run: pip install -r requirements.txt
      - name: Regenerate Master Slug List
        run: python3 tests/generate-slug-list.py
      - name: Commit and Push Changes to Master
        uses: EndBug/add-and-commit@v9
        with:
          author_name: NetBox-Bot
          author_email: info@netboxlabs.com
          committer_name: NetBox-Bot
          committer_email: info@netboxlabs.com
          default_author: github_actions
          message: "Regenerate master slug list after successful PR merge"
          push: true

View File

@ -3,3 +3,4 @@ pre-commit==3.3.3
pytest==7.4.0
PyYAML==6.0
yamllint==1.32.0
gitpython==3.1.32

View File

@ -1,16 +1,19 @@
from test_configuration import COMPONENT_TYPES, IMAGE_FILETYPES, SCHEMAS
from test_configuration import COMPONENT_TYPES, IMAGE_FILETYPES, SCHEMAS, KNOWN_SLUGS, ROOT_DIR, USE_LOCAL_KNOWN_SLUGS, NETBOX_DT_LIBRARY_URL
import pickle_operations
from yaml_loader import DecimalSafeLoader
from device_types import DeviceType, ModuleType, verify_filename, validate_components
import decimal
import glob
import json
import os
import tempfile
from urllib.request import urlopen
import pytest
import yaml
from jsonschema import Draft4Validator, RefResolver
from jsonschema.exceptions import ValidationError
from git import Repo
def _get_definition_files():
"""
@ -32,6 +35,29 @@ def _get_definition_files():
return file_list
def _get_diff_from_upstream():
    """
    Return (file_path, schema) tuples for definition files that differ from
    the upstream ``master`` branch.

    Each changed file is paired with the parsed JSON schema for its
    definition type so that tests can validate only what changed.
    """
    file_list = []

    # Open the repository rooted one directory above this test file.
    repo = Repo(f"{os.path.dirname(os.path.abspath(__file__))}/../")
    upstream = repo.remotes.upstream
    upstream.fetch()
    # Diff the current HEAD against upstream/master.
    changes = repo.head.commit.diff(upstream.refs["master"].object.hexsha)

    for path, schema_filename in SCHEMAS:
        # Load the schema, keeping floats as Decimal so values survive
        # validation unchanged. (Original code rebound the loop variable
        # `schema` here; a distinct name avoids the shadowing.)
        with open(f"schema/{schema_filename}") as schema_file:
            schema = json.loads(schema_file.read(), parse_float=decimal.Decimal)

        # Validate that the schema exists
        assert schema, f"Schema definition for {path} is empty!"

        # Map each changed file under this definition path to its schema.
        # b_path is None for deletions, which have nothing to validate.
        for file in changes:
            if file.b_path is not None and path in file.b_path:
                file_list.append((file.b_path, schema))

    return file_list
def _get_image_files():
"""
Return a list of all image files within the specified path and manufacturer.
# NOTE(review): unified-diff fragment — removed and added lines are
# interleaved below without +/- markers; this is not runnable as-is.
@ -61,11 +87,19 @@ def test_environment():
Run basic sanity checks on the environment to ensure tests are running correctly.
"""
# Validate that definition files exist
# (removed) old behavior: fail hard when no definition files were found.
assert definition_files, "No definition files found!"
# (added) new behavior: skip the run instead of failing.
# NOTE(review): this condition looks inverted — as written it skips when
# definition files ARE present; presumably it should be
# `if not definition_files:` to match the skip message. Confirm upstream.
if definition_files:
pytest.skip("No changes to definition files found.")
# (removed) module-level: previously collected every definition file.
definition_files = _get_definition_files()
# (added) module-level: now only files changed relative to upstream/master.
definition_files = _get_diff_from_upstream()
image_files = _get_image_files()
# Load the known-slug pickle either from the local checkout or from a fresh
# clone of the upstream device-type library.
if USE_LOCAL_KNOWN_SLUGS:
KNOWN_SLUGS = pickle_operations.read_pickle_data(f'{ROOT_DIR}/tests/known-slugs.pickle')
else:
temp_dir = tempfile.TemporaryDirectory()
repo = Repo.clone_from(url=NETBOX_DT_LIBRARY_URL, to_path=temp_dir.name)
KNOWN_SLUGS = pickle_operations.read_pickle_data(f'{temp_dir.name}/tests/known-slugs.pickle')
# Parametrized over the changed (file_path, schema) pairs collected above.
@pytest.mark.parametrize(('file_path', 'schema'), definition_files)
def test_definitions(file_path, schema):
"""
# NOTE(review): diff fragment — the docstring and most of the body continue
# outside this hunk and are not visible here.
@ -107,7 +141,7 @@ def test_definitions(file_path, schema):
# Verify the slug is valid, only if the definition type is a Device
if this_device.isDevice:
# (removed) old call relied on a module-level KNOWN_SLUGS import.
assert this_device.verify_slug(), pytest.fail(this_device.failureMessage, False)
# (added) new call passes the known-slug collection explicitly.
assert this_device.verify_slug(KNOWN_SLUGS), pytest.fail(this_device.failureMessage, False)
# Verify the filename is valid. Must either be the model or part_number.
assert verify_filename(this_device), pytest.fail(this_device.failureMessage, False)

View File

@ -1,4 +1,3 @@
from test_configuration import KNOWN_SLUGS
import os
class DeviceType:
@ -41,9 +40,18 @@ class DeviceType:
def get_filepath(self):
# Accessor for the path of the definition file backing this device type.
return self.file_path
# (removed) old signature read KNOWN_SLUGS from a module-level import.
def verify_slug(self):
# (added) new signature receives the known-slug collection explicitly.
def verify_slug(self, KNOWN_SLUGS):
# Verify the slug is unique, and not already known
# KNOWN_SLUGS now holds (slug, file_path) tuples, so collect every entry
# sharing this slug.
known_slug_list_intersect = [(slug, file_path) for slug, file_path in KNOWN_SLUGS if slug == self.slug]
if len(known_slug_list_intersect) == 0:
pass
elif len(known_slug_list_intersect) == 1:
# A single match is only a duplicate when it points at a different file.
if self.file_path not in known_slug_list_intersect[0][1]:
self.failureMessage = f'{self.file_path} has a duplicate slug: "{self.slug}"'
return False
return True
else:
# Two or more entries with this slug is always a duplicate.
self.failureMessage = f'{self.file_path} has a duplicate slug "{self.slug}"'
return False
# NOTE(review): hunk boundary — lines between these hunks are not shown.
@ -58,7 +66,7 @@ class DeviceType:
return False
# Add the slug to the list of known slugs
# (removed) old: stored the bare slug string.
KNOWN_SLUGS.add(self.slug)
# (added) new: store a (slug, file_path) tuple to allow per-file checks.
KNOWN_SLUGS.add((self.slug, self.file_path))
return True
def validate_power(self):
def validate_power(self):

View File

@ -0,0 +1,90 @@
import os
import json
import glob
import yaml
import decimal
from yaml_loader import DecimalSafeLoader
from jsonschema import Draft4Validator, RefResolver
from jsonschema.exceptions import ValidationError
from test_configuration import SCHEMAS, KNOWN_SLUGS, ROOT_DIR
from urllib.request import urlopen
import pickle_operations
def _get_device_type_files():
    """
    Return a list of all definition files within the specified path.

    Only the 'device-types' entries of SCHEMAS are considered; each file is
    paired with its parsed JSON schema as a (file, schema) tuple.
    """
    results = []
    for path, schema_name in SCHEMAS:
        # Slug generation only applies to device-type definitions.
        if path != 'device-types':
            continue
        # Parse the schema, keeping floats as Decimal.
        with open(f"{ROOT_DIR}/schema/{schema_name}") as schema_file:
            schema = json.loads(schema_file.read(),
                                parse_float=decimal.Decimal)
        # Validate that the schema exists
        if not schema:
            print(f"Schema definition for {path} is empty!")
            exit(1)
        # Map each definition file to its schema as a tuple (file, schema)
        results.extend(
            (f'{file}', schema)
            for file in sorted(glob.glob(f"{path}/*/*", recursive=True))
        )
    return results
def _decimal_file_handler(uri):
"""
Handler to work with floating decimals that fail normal validation.
"""
with urlopen(uri) as url:
result = json.loads(url.read().decode("utf-8"), parse_float=decimal.Decimal)
return result
def load_file(file_path, schema):
    """
    Load a YAML definition file and validate it against *schema*.

    Returns a 2-tuple: (True, parsed_definition) on success, or
    (False, error_message) on any failure (I/O, YAML parse, or schema
    validation).
    """
    # Read the raw file contents.
    try:
        with open(file_path) as handle:
            raw = handle.read()
    except Exception as exc:
        return (False, f'Error opening "{file_path}". stderr: {exc}')

    # YAML files in this repository must end with a trailing newline.
    if not raw.endswith('\n'):
        return (False, f'{file_path} is missing trailing newline')

    # Parse the YAML document, keeping floats as Decimal.
    try:
        definition = yaml.load(raw, Loader=DecimalSafeLoader)
    except Exception as exc:
        return (False, f'Error during yaml.load "{file_path}". stderr: {exc}')

    # Validate the parsed definition against the supplied JSON schema.
    # The resolver lets $ref lookups resolve relative to the schema
    # directory, with decimal-safe handling for file: URIs.
    try:
        resolver = RefResolver(
            f"file://{os.getcwd()}/schema/devicetype.json",
            schema,
            handlers={"file": _decimal_file_handler},
        )
        Draft4Validator(schema, resolver=resolver).validate(definition)
    except ValidationError as exc:
        # Schema validation failure. Ensure you are following the proper format.
        return (False, f'{file_path} failed validation: {exc}')

    return (True, definition)
def _generate_known_slugs():
    """
    Populate KNOWN_SLUGS with a (slug, file_path) tuple for every
    device-type definition file, aborting on the first invalid file.
    """
    for file_path, schema in _get_device_type_files():
        ok, definition = load_file(file_path, schema)
        if not ok:
            # `definition` carries the error message when loading failed.
            print(definition)
            exit(1)
        KNOWN_SLUGS.add((definition.get('slug'), file_path))
# Build the slug set from the working tree, then persist it so test runs can
# load it via pickle without re-parsing every definition file.
_generate_known_slugs()
pickle_operations.write_pickle_data(KNOWN_SLUGS, f'{ROOT_DIR}/tests/known-slugs.pickle')

BIN
tests/known-slugs.pickle Normal file

Binary file not shown.

View File

@ -0,0 +1,14 @@
import pickle
def write_pickle_data(data, file_path):
    """
    Serialize *data* to *file_path* using pickle.

    The ``with`` block closes the file on exit, so the explicit
    ``close()`` the original carried was redundant and has been removed.
    """
    with open(file_path, 'wb') as pickle_file:
        pickle.dump(data, pickle_file)
def read_pickle_data(file_path):
    """
    Load and return the pickled object stored at *file_path*.

    The ``with`` block closes the file on exit (the original's explicit
    ``close()`` was redundant).

    NOTE: ``pickle.load`` executes arbitrary code on crafted input — only
    call this on trusted files (here: the repo's own known-slugs cache).
    """
    with open(file_path, 'rb') as pickle_file:
        return pickle.load(pickle_file)

View File

@ -1,3 +1,5 @@
import os
SCHEMAS = (
('device-types', 'devicetype.json'),
('module-types', 'moduletype.json'),
# NOTE(review): hunk boundary — the remainder of the SCHEMAS tuple and the
# start of COMPONENT_TYPES are not shown here.
@ -19,4 +21,10 @@ COMPONENT_TYPES = (
'module-bays',
)
# (removed) old location of the KNOWN_SLUGS initializer.
KNOWN_SLUGS = set()
# Repository root, resolved relative to this file.
ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname( __file__ ), '..'))
# (added) shared set of (slug, file_path) tuples populated during test runs.
KNOWN_SLUGS = set()
# When True, read tests/known-slugs.pickle from the local checkout instead of
# cloning the upstream library.
USE_LOCAL_KNOWN_SLUGS = False
NETBOX_DT_LIBRARY_URL = "https://github.com/netbox-community/devicetype-library.git"