DRAFT: First-cut impl of struct to DDSIDL
Krishna Koppolu committed Sep 23, 2023
1 parent 51bf94e commit d1674b5
Showing 4 changed files with 142 additions and 26 deletions.
42 changes: 42 additions & 0 deletions tests/vspec/test_structs/test.idl
@@ -0,0 +1,42 @@
module VehicleDataTypes {
module TestBranch1 {
struct NestedStruct {
double x;
double y;
double z;
};

struct ParentStruct {
VehicleDataTypes::TestBranch1::NestedStruct x_property;
VehicleDataTypes::TestBranch1::NestedStruct y_property;
sequence<VehicleDataTypes::TestBranch1::NestedStruct, 10> x_properties;
sequence<VehicleDataTypes::TestBranch1::NestedStruct> y_properties;
double z_property;
};

};
};
module A
{
struct _UInt8
{
octet value;
const string unit="km";
const string type ="sensor";
const string description="A uint8.";
};

struct ParentStructSensor
{
VehicleDataTypes::TestBranch1::ParentStruct value;
const string type ="sensor";
const string description="A rich sensor with user-defined data type.";
};

struct NestedStructSensor
{
VehicleDataTypes::TestBranch1::NestedStruct value;
const string type ="sensor";
const string description="A rich sensor with user-defined data type.";
};
};
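For reference, a test along the lines of the sketch below could compare exporter output against this fixture. It is a hypothetical sketch modeled on the style of test_commandline.py shown next; the test name, the output file name, and the exact vspec2x.py arguments (notably the format selection) are assumptions, not part of this commit.

import filecmp
import os


def test_struct_idl_export(change_test_dir):
    # Hypothetical: generate IDL for a vspec that uses the VehicleDataTypes struct
    # definitions; the arguments are modeled on the neighboring tests and may differ.
    cmdline = ('../../../vspec2x.py -u ../test_units.yaml -vt VehicleDataTypes.vspec '
               '--format idl test.vspec out.idl')
    result = os.system(cmdline)
    assert os.WIFEXITED(result)
    assert os.WEXITSTATUS(result) == 0
    # Compare the generated IDL with the expected output checked in above.
    assert filecmp.cmp('out.idl', 'test.idl')
    os.remove('out.idl')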
2 changes: 1 addition & 1 deletion tests/vspec/test_structs/test_commandline.py
@@ -35,7 +35,7 @@ def test_error_when_data_types_file_is_missing(change_test_dir):
assert os.WEXITSTATUS(result) == 0


@pytest.mark.parametrize("format", ["binary", "franca", "idl", "graphql"])
@pytest.mark.parametrize("format", ["binary", "franca", "graphql"])
def test_error_with_non_compatible_formats(format, change_test_dir):
# test that program fails due to parser error
cmdline = ('../../../vspec2x.py -u ../test_units.yaml -vt VehicleDataTypes.vspec -ot output_types_file.json'
122 changes: 98 additions & 24 deletions vspec/vssexporters/vss2ddsidl.py
@@ -12,7 +12,9 @@

import argparse
import keyword
import logging

from vspec.loggingconfig import initLogging
from vspec.model.vsstree import VSSNode, VSSType


@@ -52,7 +54,7 @@ def getAllowedName(name):
or name.lower() in idl_keywords
or keyword.iskeyword(name.lower())
):
return "_"+name
return "_" + name
else:
return name

@@ -105,7 +107,7 @@ def export_node(node, generate_uuid, generate_all_idl_features):
arraysize = None

if node.type == VSSType.BRANCH:
idlFileBuffer.append("module "+getAllowedName(node.name))
idlFileBuffer.append("module " + getAllowedName(node.name))
idlFileBuffer.append("{")
for child in node.children:
export_node(child, generate_uuid, generate_all_idl_features)
@@ -120,7 +122,7 @@ def export_node(node, generate_uuid, generate_all_idl_features):
module name for enum is chosen as the node name +
"""
if (node.datatype.value in ["string", "string[]"]):
idlFileBuffer.append("module "+getAllowedName(node.name)+"_M")
idlFileBuffer.append("module " + getAllowedName(node.name) + "_M")
idlFileBuffer.append("{")
idlFileBuffer.append("enum " + getAllowedName(node.name) +
"Values{"+str(",".join(get_allowed_enum_literal(item) for item in node.allowed)) +
@@ -132,19 +134,24 @@ def export_node(node, generate_uuid, generate_all_idl_features):
print(f"Warning: VSS2IDL can only handle allowed values for string type, "
f"signal {node.name} has type {node.datatype.value}")

idlFileBuffer.append("struct "+getAllowedName(node.name))
idlFileBuffer.append("struct " + getAllowedName(node.name))
idlFileBuffer.append("{")
if generate_uuid:
idlFileBuffer.append("string uuid;")
# fetching value of datatype and obtaining the equivalent DDS type
try:
if str(node.datatype.value) in dataTypesMap_covesa_dds:
datatype = str(dataTypesMap_covesa_dds[str(node.datatype.value)])
elif '[' in str(node.datatype.value):
nodevalueArray = str(node.datatype.value).split("[", 1)
if str(nodevalueArray[0]) in dataTypesMap_covesa_dds:
datatype = str(dataTypesMap_covesa_dds[str(nodevalueArray[0])])
arraysize = '['+str(arraysize)+nodevalueArray[1]
datatype_str = node.get_datatype()
if node.has_datatype():
if datatype_str in dataTypesMap_covesa_dds:
datatype = str(dataTypesMap_covesa_dds[datatype_str])
elif '[' in datatype_str:
nodevalueArray = datatype_str.split("[", 1)
# TODO: Handle fixed size arrays
if str(nodevalueArray[0]) in dataTypesMap_covesa_dds:
datatype = str(dataTypesMap_covesa_dds[str(nodevalueArray[0])])
arraysize = '[' + str(arraysize) + nodevalueArray[1]
else:  # not a primitive type, so this is a custom (struct) data type
datatype = datatype_str.replace(".", "::") # custom data type

except AttributeError:
pass
@@ -166,27 +173,28 @@ def export_node(node, generate_uuid, generate_all_idl_features):
if datatype is not None:
# adding range if min and max are specified in vspec file
if min is not None and max is not None and generate_all_idl_features:
idlFileBuffer.append("@range(min="+str(min)+" ,max="+str(max)+")")
idlFileBuffer.append("@range(min=" + str(min) + " ,max=" + str(max) + ")")

if allowedValues is None:
if defaultValue is None:
idlFileBuffer.append(("sequence<"+datatype+"> value" if (arraysize is not None) else
datatype+" value")+";")
idlFileBuffer.append(
("sequence<" + datatype + "> value" if arraysize is not None else datatype + " value") + ";")
else:
# default values in IDL file are not accepted by CycloneDDS/FastDDS :
# these values can be generated if --all-idl-features is set as True
idlFileBuffer.append(("sequence<"+datatype+"> value" if arraysize is not None else
datatype + " value") +
(" default " + str(defaultValue) if generate_all_idl_features else "") + ";")
idlFileBuffer.append(
("sequence<" + datatype + "> value" if arraysize is not None else datatype + " value") +
(" default " + str(defaultValue) if generate_all_idl_features else "") + ";")
else:
# this is the case where allowed values are provided, accordingly contents are converted to enum
if defaultValue is None:
idlFileBuffer.append(getAllowedName(node.name)+"_M::"+getAllowedName(node.name)+"Values value;")
else:
# default values in IDL file are not accepted by CycloneDDS/FastDDS :
# these values can be generated if --all-idl-features is set as True
idlFileBuffer.append(getAllowedName(node.name) + "_M::"+getAllowedName(node.name) + "Values value" +
(" " + str(defaultValue) if generate_all_idl_features else "") + ";")
idlFileBuffer.append(
getAllowedName(node.name) + "_M::" + getAllowedName(node.name) +
"Values value" + (" " + str(defaultValue) if generate_all_idl_features else "") + ";")

if unit is not None:
idlFileBuffer.append(("" if generate_all_idl_features else "//") + "const string unit=\"" + unit + "\";")
@@ -199,16 +207,82 @@ def export_node(node, generate_uuid, generate_all_idl_features):
idlFileBuffer.append("};")


class StructExporter(object):
    def __init__(self):
        self.str_buf = ""
        self.structs_seen = []

    def export(self, root) -> str:
        self.str_buf = ""
        self.structs_seen = []
        self.export_data_type_node(root)
        return self.str_buf

    def export_data_type_node(self, node):
        """
        This method is used to traverse a VSS data type node and to create the corresponding DDS IDL buffer string
        """

        prefix = ""
        suffix = ""
        if node.is_branch():
            prefix = f"module {getAllowedName(node.name)}" + " {\n"
            suffix = "};\n"
        elif node.is_struct():
            # check if the properties use structs that have not been seen before
            # if not, add a forward declaration
            fwds = []
            for c in node.children:
                # primitive type
                if c.has_datatype():
                    continue

                datatype_str = c.get_datatype().replace('.', '::').split("[", 1)[0]
                if datatype_str not in self.structs_seen:
                    base_type = datatype_str.split("::")[-1]
                    fwds.append(base_type)

            for f in sorted(set(fwds)):
                prefix += f"struct {f};\n"

            prefix += f"struct {getAllowedName(node.name)}" + " {\n"
            suffix = "};\n"
            self.structs_seen.append(node.qualified_name("::").split("[", 1)[0])
        else:
            datatype_str = node.get_datatype().replace('.', '::').split("[", 1)[0]
            is_seq = '[' in node.get_datatype()
            if is_seq:
                self.str_buf += f"sequence<{datatype_str}> {getAllowedName(node.name)};\n"
            else:
                self.str_buf += f"{datatype_str} {getAllowedName(node.name)};\n"

        self.str_buf += prefix

        for child in node.children:
            self.export_data_type_node(child)
        self.str_buf += suffix


def export_idl(file, root, generate_uuids=True, generate_all_idl_features=False):
    """This method is used to traverse through the root VSS node to build
    the DDS IDL equivalent string buffer and to serialize it accordingly into a file
    """
    export_node(root, generate_uuids, generate_all_idl_features)
    file.write('\n'.join(idlFileBuffer))
    print("IDL file generated at location : "+file.name)
    logging.info("IDL file generated at location : " + file.name)


def export(config: argparse.Namespace, signal_root: VSSNode, print_uuid, data_type_root: VSSNode):
    logging.info("Generating DDS-IDL output...")

    if data_type_root is not None:
        exporter = StructExporter()
        with open(config.output_file, 'w') as idl_out:
            idl_out.write(exporter.export(data_type_root))

    with open(config.output_file, 'a' if data_type_root is not None else 'w') as idl_out:
        export_idl(idl_out, signal_root, print_uuid, config.all_idl_features)

def export(config: argparse.Namespace, root: VSSNode, print_uuid):
    print("Generating DDS-IDL output...")
    idl_out = open(config.output_file, 'w')
    export_idl(idl_out, root, print_uuid, config.all_idl_features)
if __name__ == "__main__":
    initLogging()
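As background on the forward declarations emitted by StructExporter above: IDL generally requires a type to be declared before it is referenced, which is presumably why export_data_type_node adds a "struct <Name>;" line when a property uses a struct that has not been emitted yet. The snippet below is a minimal standalone sketch of that bookkeeping; the helper name and call shape are hypothetical and not part of vss2ddsidl.py.

# Standalone sketch (hypothetical helper) of the forward-declaration bookkeeping
# used by StructExporter: a property whose struct type has not been emitted yet
# triggers a "struct <Name>;" forward declaration so the generated IDL parses
# regardless of the traversal order of the data type tree.
structs_seen = []


def forward_decls(struct_qualified_name, property_type_names):
    # Collect the base names of referenced struct types that are still unseen.
    fwds = sorted({t.split("::")[-1] for t in property_type_names if t not in structs_seen})
    structs_seen.append(struct_qualified_name)
    return [f"struct {name};" for name in fwds]


# ParentStruct references NestedStruct before NestedStruct has been emitted:
print(forward_decls("VehicleDataTypes::TestBranch1::ParentStruct",
                    ["VehicleDataTypes::TestBranch1::NestedStruct"]))
# prints: ['struct NestedStruct;']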
2 changes: 1 addition & 1 deletion vspec2x.py
@@ -24,7 +24,7 @@
from vspec.vssexporters import vss2json, vss2csv, vss2yaml, \
vss2binary, vss2franca, vss2ddsidl, vss2graphql, vss2protobuf

SUPPORTED_STRUCT_EXPORT_FORMATS = set(["json", "yaml", "csv", "protobuf"])
SUPPORTED_STRUCT_EXPORT_FORMATS = set(["json", "yaml", "csv", "protobuf", "idl"])


class Exporter(Enum):
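For context, SUPPORTED_STRUCT_EXPORT_FORMATS presumably gates which exporters may be combined with a user-defined type tree (the -vt input), and adding "idl" is what enables the struct export introduced in this commit. Below is a hypothetical sketch of such a gate; the actual validation lives elsewhere in vspec2x.py and is not part of this diff.

# Hypothetical sketch of the kind of check SUPPORTED_STRUCT_EXPORT_FORMATS could feed;
# the real validation in vspec2x.py is outside this diff and may differ.
SUPPORTED_STRUCT_EXPORT_FORMATS = set(["json", "yaml", "csv", "protobuf", "idl"])


def check_struct_export(output_format: str, has_type_tree: bool) -> None:
    if has_type_tree and output_format not in SUPPORTED_STRUCT_EXPORT_FORMATS:
        raise SystemExit(f"{output_format} does not support user-defined data type export")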
