Added struct default validation
Signed-off-by: Sebastian Schleemilch <[email protected]>
sschleemilch committed Oct 25, 2024
1 parent 2f1a984 commit bd20706
Showing 11 changed files with 406 additions and 24 deletions.
185 changes: 183 additions & 2 deletions poetry.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions pyproject.toml
@@ -19,6 +19,7 @@ importlib-metadata = "^7.0"
click = "^8.1.7"
rich-click = "^1.8.3"
pydantic = "^2.8.2"
jsonschema = "^4.23.0"

[tool.poetry.group.dev.dependencies]
mypy = "*"
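
The jsonschema dependency added above is what backs the new struct default validation. As a minimal sketch of the call pattern used later in model.py (the schema and value here are illustrative, not produced by the tool):

import jsonschema

# Illustrative schema and value only
schema = {"type": "object", "required": ["m"], "properties": {"m": {"type": "number"}}}
try:
    jsonschema.validate({"m": "definitely not an uint8"}, schema)
except jsonschema.ValidationError as e:
    print(e.message)  # prints roughly: 'definitely not an uint8' is not of type 'number'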
4 changes: 2 additions & 2 deletions src/vss_tools/datatypes.py
@@ -11,9 +11,9 @@

# Global objects to be extended by other code parts
dynamic_datatypes: Set[str] = set()
dynamic_struct_schemas: dict[str, dict[str, Any]] = {}
dynamic_quantities: list[str] = []
# This one contains the unit name as well as the list of allowed-datatypes
dynamic_units: dict[str, list] = {}
dynamic_units: dict[str, list] = {} # unit name -> allowed datatypes


class DatatypesException(Exception):
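
For orientation, dynamic_struct_schemas maps a struct's fully qualified name to the JSON schema generated for it in tree.py; a hypothetical entry could look like this (sketch only, names invented):

# Hypothetical entry, for illustration only
dynamic_struct_schemas["Types.SomeStruct"] = {
    "$schema": "https://json-schema.org/draft/2020-12/schema",
    "type": "object",
    "required": ["x"],
    "properties": {"x": {"type": "number"}},
}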
11 changes: 6 additions & 5 deletions src/vss_tools/main.py
@@ -22,7 +22,7 @@
VSSDataStruct,
get_all_model_fields,
)
from vss_tools.tree import ModelValidationException, VSSNode, build_tree
from vss_tools.tree import ModelValidationException, VSSNode, add_struct_schemas, build_tree
from vss_tools.units_quantities import load_quantities, load_units
from vss_tools.vspec import InvalidSpecDuplicatedEntryException, InvalidSpecException, load_vspec

@@ -126,10 +126,6 @@ def get_types_root(types: tuple[Path, ...], include_dirs: list[Path]) -> VSSNode
else:
types_root = root

if dynamic_datatypes:
log.info(f"Dynamic datatypes added={len(dynamic_datatypes)}")
log.debug(f"Dynamic datatypes:\n{dynamic_datatypes}")

# Checking whether user defined root types e.g 'MyType'
# instead of 'Types.MyType'
if not all(["." in t for t in dynamic_datatypes]):
@@ -142,6 +138,11 @@ def get_types_root(types: tuple[Path, ...], include_dirs: list[Path]) -> VSSNode
log.critical(e)
exit(1)

if dynamic_datatypes:
log.info(f"Dynamic datatypes added={len(dynamic_datatypes)}")
log.debug(f"Dynamic datatypes:\n{dynamic_datatypes}")
add_struct_schemas(types_root)

return types_root


60 changes: 47 additions & 13 deletions src/vss_tools/model.py
@@ -9,6 +9,7 @@
from enum import Enum
from typing import Any

import jsonschema
from pydantic import (
BaseModel,
ConfigDict,
@@ -23,7 +24,9 @@
from vss_tools import log
from vss_tools.datatypes import (
Datatypes,
DatatypesException,
dynamic_quantities,
dynamic_struct_schemas,
dynamic_units,
get_all_datatypes,
is_array,
@@ -48,7 +51,7 @@ def __init__(self, element: str | None, ve: ValidationError):
self.ve = ve

def __str__(self) -> str:
errors = self.ve.errors(include_url=False)
errors = self.ve.errors(include_url=False, include_context=False)
return f"'{self.element}' has {len(errors)} model error(s):\n{pretty_repr(errors)}"


@@ -140,7 +143,7 @@ def fill_instances(cls, v: Any) -> list[str]:
if v is None:
return []
if not (isinstance(v, str) or isinstance(v, list)):
assert False, f"'{v}' is not a valid 'instances' content"
raise ValueError(f"'{v}' is not a valid 'instances' content")
if isinstance(v, str):
return [v]
return v
@@ -180,7 +183,7 @@ class VSSDataDatatype(VSSData):
max: int | float | None = None
unit: str | None = None
allowed: list[str | int | float | bool] | None = None
default: list[str | int | float | bool] | str | int | float | bool | None = None
default: Any = None

@model_validator(mode="after")
def check_type_arraysize_consistency(self) -> Self:
@@ -192,30 +195,57 @@ def check_type_arraysize_consistency(self) -> Self:
assert is_array(self.datatype), f"'arraysize' set on a non array datatype: '{self.datatype}'"
return self

def check_min_max_valid_datatype(self) -> Self:
if self.min or self.max:
try:
Datatypes.is_subtype_of(self.datatype, Datatypes.NUMERIC[0])
except DatatypesException:
raise ValueError(f"Cannot define min/max for datatype '{self.datatype}'")
if is_array(self.datatype):
raise ValueError("Cannot define min/max for array datatypes")
return self

def check_default_min_max(self) -> Self:
if self.default:
if self.min and self.default < self.min:
raise ValueError(f"'default' smaller than 'min': {self.default}<{self.min}")
if self.max and self.default > self.max:
raise ValueError(f"'default' greater than 'max': {self.default}>{self.min}")
return self

def check_type_default_consistency(self) -> Self:
"""
Checks that the default value
is consistent with the given datatype
"""
if self.default is not None:
if is_array(self.datatype):
array = is_array(self.datatype)
if array:
assert isinstance(
self.default, list
), f"'default' with type '{type(self.default)}' does not match datatype '{self.datatype}'"
if self.arraysize:
assert len(self.default) == self.arraysize, "'default' array size does not match 'arraysize'"
for v in self.default:
assert Datatypes.is_datatype(v, self.datatype), f"'{v}' is not of type '{self.datatype}'"
else:
assert not isinstance(
self.default, list
), f"'default' with type '{type(self.default)}' does not match datatype '{self.datatype}'"
assert Datatypes.is_datatype(
self.default, self.datatype
), f"'{self.default}' is not of type '{self.datatype}'"

check_values = [self.default]
if array:
check_values = self.default

if Datatypes.get_type(self.datatype) is None:
for check_value in check_values:
try:
jsonschema.validate(check_value, dynamic_struct_schemas[self.datatype.strip("[]")])
except jsonschema.ValidationError as e:
raise ValueError(f"invalid 'default' format for datatype '{self.datatype}': {e.message}")
else:
for v in check_values:
assert Datatypes.is_datatype(v, self.datatype), f"'{v}' is not of type '{self.datatype}'"
return self

@model_validator(mode="after")
def check_default_values_allowed(self) -> Self:
"""
Checks that the given default values
@@ -235,6 +265,7 @@ def check_allowed_datatype_consistency(self) -> Self:
datatypes
"""
if self.allowed:
assert Datatypes.get_type(self.datatype), "'allowed' cannot be used with struct datatype"
for v in self.allowed:
assert Datatypes.is_datatype(v, self.datatype), f"'{v}' is not of type '{self.datatype}'"
return self
@@ -252,8 +283,11 @@
def check_datatype(self) -> Self:
assert self.datatype in get_all_datatypes(self.fqn), f"'{self.datatype}' is not a valid datatype"
self.datatype = resolve_datatype(self.datatype, self.fqn)
self.check_type_default_consistency()
self.check_allowed_datatype_consistency()
self = self.check_type_default_consistency()
self = self.check_allowed_datatype_consistency()
self = self.check_default_values_allowed()
self = self.check_min_max_valid_datatype()
self = self.check_default_min_max()
return self

@field_validator("unit")
Expand All @@ -271,7 +305,7 @@ def check_datatype_matching_allowed_unit_datatypes(self) -> Self:
referenced in the unit if given
"""
if self.unit:
assert Datatypes.get_type(self.datatype), f"Cannot use 'unit' with complex datatype: '{self.datatype}'"
assert Datatypes.get_type(self.datatype), f"Cannot use 'unit' with struct datatype: '{self.datatype}'"
assert any(
Datatypes.is_subtype_of(self.datatype.rstrip("[]"), a) for a in dynamic_units[self.unit]
), f"'{self.datatype}' is not allowed for unit '{self.unit}'"
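
To make the new default handling concrete, here is a small self-contained sketch of the struct branch of check_type_default_consistency (simplified names and a hand-written schema; only the jsonschema call and the error text mirror the diff):

import jsonschema

# Hand-written stand-in for dynamic_struct_schemas["Types.T"], illustration only
struct_schema = {
    "type": "object",
    "required": ["m"],
    "properties": {"m": {"type": "number"}},
}

# Hypothetical helper, not part of vss-tools
def check_struct_default(default, datatype: str, array: bool):
    # As in the diff: wrap scalar defaults so arrays and scalars share one loop
    check_values = default if array else [default]
    for value in check_values:
        try:
            jsonschema.validate(value, struct_schema)
        except jsonschema.ValidationError as e:
            raise ValueError(f"invalid 'default' format for datatype '{datatype}': {e.message}")

check_struct_default({"m": 10}, "Types.T", False)         # passes
check_struct_default([{"m": "oops"}], "Types.T[]", True)  # raises ValueError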
60 changes: 59 additions & 1 deletion src/vss_tools/tree.py
@@ -15,12 +15,13 @@
from pydantic import ValidationError

from vss_tools import log
from vss_tools.datatypes import Datatypes, dynamic_datatypes
from vss_tools.datatypes import Datatypes, dynamic_datatypes, dynamic_struct_schemas, is_array
from vss_tools.model import (
ModelValidationException,
VSSData,
VSSDataBranch,
VSSDataDatatype,
VSSDataProperty,
VSSDataStruct,
VSSRaw,
get_vss_raw,
@@ -530,3 +531,60 @@ def expand_string(s: str) -> list[str]:
    for i in range(int(match.group(2)), int(match.group(3)) + 1):
        expanded.append(s.replace(match.group(1), str(i)))
    return expanded


def add_struct_schemas(types_root: VSSNode):
    for node in PreOrderIter(types_root, filter_=lambda n: isinstance(n.data, VSSDataStruct)):
        log.info(node)
        schema = {
            "$schema": "https://json-schema.org/draft/2020-12/schema",
            "type": "object",
        }
        add_node_schema(types_root, node.get_fqn(), schema)
        dynamic_struct_schemas[node.get_fqn()] = schema


def add_node_schema(root: VSSNode, fqn: str, schema: dict[str, Any]) -> None:
    datatype_map = {
        Datatypes.UINT8[0]: "number",
        Datatypes.INT8[0]: "number",
        Datatypes.UINT16[0]: "number",
        Datatypes.INT16[0]: "number",
        Datatypes.UINT32[0]: "number",
        Datatypes.INT32[0]: "number",
        Datatypes.UINT64[0]: "number",
        Datatypes.INT64[0]: "number",
        Datatypes.FLOAT[0]: "number",
        Datatypes.DOUBLE[0]: "number",
        Datatypes.NUMERIC[0]: "number",
        Datatypes.BOOLEAN[0]: "boolean",
    }

    node = root.get_node_with_fqn(fqn)
    if node:
        properties: dict[str, Any] = {}
        child: VSSNode
        for child in node.children:
            if isinstance(child.data, VSSDataProperty):
                array = is_array(child.data.datatype)
                input_datatype = child.data.datatype.strip("[]")
                datatype: str | None = None
                if input_datatype in datatype_map:
                    datatype = datatype_map[input_datatype]
                else:
                    d = Datatypes.get_type(input_datatype)
                    if d:
                        datatype = d[0]
                if datatype:
                    log.debug(f"Datatype: {datatype}")
                    if array:
                        properties[child.name] = {"type": "array", "items": {"type": datatype}}
                    else:
                        properties[child.name] = {"type": datatype}
                # A referenced struct
                else:
                    properties[child.name] = {"type": "object"}
                    add_node_schema(root, input_datatype, properties[child.name])

        schema["required"] = list(properties.keys())
        schema["properties"] = properties
25 changes: 25 additions & 0 deletions tests/vspec/test_structs/struct_default_model_nok.vspec
@@ -0,0 +1,25 @@
A:
  type: branch
  description: A

A.B:
  type: sensor
  description: B
  datatype: Types.T
  default:
    m: definitely not an uint8
    complex:
      m:
        - true
        - false

A.C:
  type: sensor
  description: C
  datatype: Types.T[]
  default:
    - m: definitely not an uint8
      complex:
        m:
          - true
          - false
25 changes: 25 additions & 0 deletions tests/vspec/test_structs/struct_default_model_ok.vspec
@@ -0,0 +1,25 @@
A:
  type: branch
  description: A

A.B:
  type: sensor
  description: B
  datatype: Types.T
  default:
    m: 10
    complex:
      m:
        - true
        - false

A.C:
  type: sensor
  description: C
  datatype: Types.T[]
  default:
    - m: 10
      complex:
        m:
          - true
          - false
26 changes: 26 additions & 0 deletions tests/vspec/test_structs/struct_default_types.vspec
@@ -0,0 +1,26 @@
Types:
  type: branch
  description: Types

Types.T:
  type: struct
  description: T

Types.T.m:
  type: property
  description: Tx
  datatype: uint8

Types.T.complex:
  type: property
  description: TComplex
  datatype: Z

Types.Z:
  type: struct
  description: Z

Types.Z.m:
  type: property
  description: Zm
  datatype: boolean[]
2 changes: 1 addition & 1 deletion tests/vspec/test_structs/test_data_type_parsing.py
@@ -316,7 +316,7 @@ def test_error_when_no_user_defined_data_types_are_provided(tmp_path):
(
"test_with_unit_on_struct_signal.vspec",
"VehicleDataTypes.vspec",
"Cannot use 'unit' with complex datatype: 'VehicleDataTypes.TestBranch1.ParentStruct'",
"Cannot use 'unit' with struct datatype: 'VehicleDataTypes.TestBranch1.ParentStruct'",
),
],
)
31 changes: 31 additions & 0 deletions tests/vspec/test_structs/test_default.py
@@ -0,0 +1,31 @@
# Copyright (c) 2024 Contributors to COVESA
#
# This program and the accompanying materials are made available under the
# terms of the Mozilla Public License 2.0 which is available at
# https://www.mozilla.org/en-US/MPL/2.0/
#
# SPDX-License-Identifier: MPL-2.0

import subprocess
from pathlib import Path

HERE = Path(__file__).resolve().parent


def test_struct_default(tmp_path):
    vspec = HERE / "struct_default_model_ok.vspec"
    types = HERE / "struct_default_types.vspec"
    cmd = f"vspec export tree -s {vspec} -t {types}"

    # ok
    p = subprocess.run(cmd.split())
    assert p.returncode == 0

    # nok
    log = tmp_path / "log.txt"
    vspec = HERE / "struct_default_model_nok.vspec"
    cmd = f"vspec --log-file {log} export tree -s {vspec} -t {types}"
    p = subprocess.run(cmd.split(), capture_output=True, text=True)
    assert p.returncode != 0
    print(log.read_text())
    assert "invalid 'default' format for datatype 'Types.T'" in log.read_text()
