diff --git a/mapping_workbench/backend/conceptual_mapping_rule/models/entity.py b/mapping_workbench/backend/conceptual_mapping_rule/models/entity.py
index eac5ed2ca..079b88ab0 100644
--- a/mapping_workbench/backend/conceptual_mapping_rule/models/entity.py
+++ b/mapping_workbench/backend/conceptual_mapping_rule/models/entity.py
@@ -251,3 +251,4 @@ class Settings(BaseProjectResourceEntity.Settings):
class ConceptualMappingRuleData(ConceptualMappingRule):
source_structural_element: Optional[StructuralElement] = None
+ mapping_groups: Optional[List[MappingGroup]] = None
diff --git a/mapping_workbench/backend/conceptual_mapping_rule/services/cm2shacl.py b/mapping_workbench/backend/conceptual_mapping_rule/services/cm2shacl.py
index 7f8faca28..21f5b1c52 100644
--- a/mapping_workbench/backend/conceptual_mapping_rule/services/cm2shacl.py
+++ b/mapping_workbench/backend/conceptual_mapping_rule/services/cm2shacl.py
@@ -4,8 +4,8 @@
from mapping_workbench.backend.conceptual_mapping_rule.adapters.cm2shacl import CMtoSHACL
from mapping_workbench.backend.conceptual_mapping_rule.models.entity import ConceptualMappingRuleData
-from mapping_workbench.backend.conceptual_mapping_rule.services.data import get_conceptual_mapping_rules_for_project, \
- get_conceptual_mapping_rules_with_elements_for_project_and_package
+from mapping_workbench.backend.conceptual_mapping_rule.services.data import \
+ get_conceptual_mapping_rules_with_data_for_project_and_package
from mapping_workbench.backend.mapping_package.models.entity import MappingPackage
from mapping_workbench.backend.ontology.services.namespaces import get_prefixes_definitions
from mapping_workbench.backend.project.models.entity import Project
@@ -43,7 +43,7 @@ async def generate_shacl_shapes_from_cm_rules(
).delete()
cm_rules: List[ConceptualMappingRuleData] = \
- await get_conceptual_mapping_rules_with_elements_for_project_and_package(project_id, mapping_package)
+ await get_conceptual_mapping_rules_with_data_for_project_and_package(project_id, mapping_package)
shacl_shapes: List[SHACLTestFileResource] = CMtoSHACL(
prefixes=(prefixes or (await get_prefixes_definitions(project_id))),
diff --git a/mapping_workbench/backend/conceptual_mapping_rule/services/data.py b/mapping_workbench/backend/conceptual_mapping_rule/services/data.py
index 551c33934..fae33d7cc 100644
--- a/mapping_workbench/backend/conceptual_mapping_rule/services/data.py
+++ b/mapping_workbench/backend/conceptual_mapping_rule/services/data.py
@@ -36,7 +36,7 @@ async def get_conceptual_mapping_rules_for_project_and_package(
return items
-async def get_conceptual_mapping_rules_with_elements_for_project_and_package(
+async def get_conceptual_mapping_rules_with_data_for_project_and_package(
project_id: PydanticObjectId,
mapping_package: MappingPackage = None
) -> \
@@ -56,6 +56,11 @@ async def get_conceptual_mapping_rules_with_elements_for_project_and_package(
item_data.source_structural_element = source_structural_element \
if isinstance(source_structural_element, StructuralElement) else None
+ mapping_groups_data = []
+ if item.mapping_groups:
+ mapping_groups_data = [await mapping_group.fetch() for mapping_group in item.mapping_groups]
+ item_data.mapping_groups = mapping_groups_data
+
items_data.append(item_data)
return items_data
diff --git a/mapping_workbench/backend/core/adapters/exporter.py b/mapping_workbench/backend/core/adapters/exporter.py
new file mode 100644
index 000000000..e2dc89550
--- /dev/null
+++ b/mapping_workbench/backend/core/adapters/exporter.py
@@ -0,0 +1,11 @@
+from pathlib import Path
+
+
+class ArchiveExporter:
+ @classmethod
+ def write_to_file(cls, file_path: Path, file_content: str):
+ file_path.write_text(file_content, encoding="utf-8")
+
+ @classmethod
+ def create_dir(cls, path: Path):
+ path.mkdir(parents=True, exist_ok=True)
diff --git a/mapping_workbench/backend/core/services/request.py b/mapping_workbench/backend/core/services/request.py
index 8835782ff..f7c8f606b 100644
--- a/mapping_workbench/backend/core/services/request.py
+++ b/mapping_workbench/backend/core/services/request.py
@@ -8,13 +8,17 @@
from mapping_workbench.backend.user.models.user import User
-def request_update_data(entity_data: BaseModel, user: User = None) -> dict:
- data = entity_data.model_dump(exclude_unset=True)
-
+def request_data_update_refs(data: dict, entity_data: BaseModel) -> dict:
for field in entity_data.model_fields:
prop = entity_data.__dict__[field]
if isinstance(prop, (Link, PydanticObjectId)):
data[field] = prop
+ return data
+
+
+def request_update_data(entity_data: BaseModel, user: User = None) -> dict:
+ data = entity_data.model_dump(exclude_unset=True)
+ data = request_data_update_refs(data, entity_data)
if user:
data['updated_by'] = User.link_from_id(user.id)
@@ -25,6 +29,7 @@ def request_update_data(entity_data: BaseModel, user: User = None) -> dict:
def request_create_data(entity_data: BaseModel, user: User = None) -> dict:
data = entity_data.model_dump()
+ data = request_data_update_refs(data, entity_data)
if user:
data['created_by'] = User.link_from_id(user.id)
diff --git a/mapping_workbench/backend/mapping_package/entrypoints/api/routes.py b/mapping_workbench/backend/mapping_package/entrypoints/api/routes.py
index cfef8aeb4..665bc4066 100644
--- a/mapping_workbench/backend/mapping_package/entrypoints/api/routes.py
+++ b/mapping_workbench/backend/mapping_package/entrypoints/api/routes.py
@@ -101,7 +101,7 @@ async def route_create_default_mapping_package(
epo_version=package_epo_version,
eform_subtypes=[],
eforms_sdk_versions=package_eforms_sdk_versions,
- project=project
+ project=Project.link_from_id(project.id)
)
return await create_mapping_package(data, user=user)
diff --git a/mapping_workbench/backend/mapping_package/models/entity.py b/mapping_workbench/backend/mapping_package/models/entity.py
index ad1ae7d07..81385744b 100644
--- a/mapping_workbench/backend/mapping_package/models/entity.py
+++ b/mapping_workbench/backend/mapping_package/models/entity.py
@@ -181,6 +181,7 @@ async def get_conceptual_mapping_rules_states(self) -> List[ConceptualMappingRul
Eq(ConceptualMappingRule.refers_to_mapping_package_ids, self.id),
Eq(ConceptualMappingRule.project, self.project)
).to_list()
+
conceptual_mapping_rule_states = [
await conceptual_mapping_rule.get_state() for conceptual_mapping_rule in conceptual_mapping_rules
]
@@ -205,10 +206,12 @@ async def get_test_data_suites_states(self) -> List[TestDataSuiteState]:
In(TestDataSuite.id, test_data_suites_ids),
Eq(TestDataSuite.project, self.project)
).to_list()
+
if test_data_suites:
for test_data_suite in test_data_suites:
test_data_suite_state = await test_data_suite.get_state()
test_data_suites_states.append(test_data_suite_state)
+
return test_data_suites_states
async def get_shacl_test_suites_states(self) -> List[SHACLTestSuiteState]:
diff --git a/mapping_workbench/backend/mapping_rule_registry/services/data.py b/mapping_workbench/backend/mapping_rule_registry/services/data.py
new file mode 100644
index 000000000..f8d00c005
--- /dev/null
+++ b/mapping_workbench/backend/mapping_rule_registry/services/data.py
@@ -0,0 +1,15 @@
+from typing import List
+
+from beanie import PydanticObjectId
+
+from mapping_workbench.backend.mapping_rule_registry.models.entity import MappingGroup
+from mapping_workbench.backend.project.models.entity import Project
+
+
+async def get_mapping_groups_for_project(project_id: PydanticObjectId) -> \
+ List[MappingGroup]:
+ items: List[MappingGroup] = await MappingGroup.find(
+ MappingGroup.project == Project.link_from_id(project_id)
+ ).to_list()
+
+ return items
diff --git a/mapping_workbench/backend/package_exporter/adapters/cm_exporter.py b/mapping_workbench/backend/package_exporter/adapters/cm_exporter.py
index 2e706d29e..0e680fe28 100644
--- a/mapping_workbench/backend/package_exporter/adapters/cm_exporter.py
+++ b/mapping_workbench/backend/package_exporter/adapters/cm_exporter.py
@@ -6,7 +6,12 @@
import pandas as pd
from pandas import DataFrame
+from mapping_workbench.backend.conceptual_mapping_rule.services.data import \
+ get_conceptual_mapping_rules_with_data_for_project_and_package
from mapping_workbench.backend.mapping_package.models.entity import MappingPackage, MappingPackageState
+from mapping_workbench.backend.mapping_rule_registry.services.data import get_mapping_groups_for_project
+from mapping_workbench.backend.project.models.entity import Project
+from mapping_workbench.backend.resource_collection.services.data import get_resource_files_for_project
class CMExporterException(Exception):
@@ -16,7 +21,7 @@ class CMExporterException(Exception):
class CMExporter(ABC):
@abstractmethod
- def export(self, mapping_package: MappingPackage) -> 'CMExporter':
+ def export_for_package_state(self, mapping_package: MappingPackage) -> 'CMExporter':
pass
@abstractmethod
@@ -78,7 +83,7 @@ def generate_metadata_table(cls, mapping_package_state: MappingPackageState) ->
return metadata_table
@classmethod
- def generate_rules_table(cls, mapping_package_state: MappingPackageState) -> DataFrame:
+ def generate_rules_table(cls, conceptual_mapping_rules) -> DataFrame:
def prepare_notes(notes: List) -> str:
return '; '.join(note.comment for note in notes) if notes else ""
@@ -99,17 +104,17 @@ def prepare_notes(notes: List) -> str:
"Feedback Notes (private)"
])
cm_rules = sorted(
- mapping_package_state.conceptual_mapping_rules, key=lambda x: (x.sort_order is not None, x.sort_order)
+ conceptual_mapping_rules, key=lambda x: (x.sort_order is not None, x.sort_order)
)
for idx, cm_rule in enumerate(cm_rules):
- mapping_groups_ids = map(lambda x: x.name, cm_rule.mapping_groups)
+ mapping_groups_ids = [x.name for x in (cm_rule.mapping_groups or []) if x.name is not None]
rules_table.loc[idx] = [
cm_rule.min_sdk_version,
cm_rule.max_sdk_version,
cm_rule.source_structural_element.sdk_element_id,
cm_rule.source_structural_element.name,
cm_rule.source_structural_element.bt_id,
- ', '.join(mapping_groups_ids),
+            ', '.join(mapping_groups_ids),
cm_rule.source_structural_element.absolute_xpath,
cm_rule.xpath_condition,
cm_rule.target_class_path,
@@ -122,7 +127,7 @@ def prepare_notes(notes: List) -> str:
return rules_table
@classmethod
- def generate_mapping_groups_table(cls, mapping_package_state: MappingPackageState) -> DataFrame:
+ def generate_mapping_groups_table(cls, mapping_groups) -> DataFrame:
mapping_groups_table: DataFrame = pd.DataFrame(columns=[
"Mapping Group ID",
"Instance Type (ontology Class)",
@@ -130,7 +135,7 @@ def generate_mapping_groups_table(cls, mapping_package_state: MappingPackageStat
"Instance URI Template",
"TripleMap"
])
- for idx, mapping_group in enumerate(mapping_package_state.mapping_groups):
+ for idx, mapping_group in enumerate(mapping_groups):
mapping_groups_table.loc[idx] = [
mapping_group.name,
mapping_group.class_uri,
@@ -141,22 +146,42 @@ def generate_mapping_groups_table(cls, mapping_package_state: MappingPackageStat
return mapping_groups_table
@classmethod
- def generate_resources_table(cls, mapping_package_state: MappingPackageState) -> DataFrame:
+ def generate_resources_table(cls, resources) -> DataFrame:
filename_col_name = "File name"
resources_table: DataFrame = pd.DataFrame(columns=[filename_col_name])
- for idx, resource in enumerate(mapping_package_state.resources):
+ for idx, resource in enumerate(resources):
resources_table.loc[idx] = [resource.filename]
return resources_table
- def export(self, mapping_package_state: MappingPackageState) -> 'CMExporter':
+ def export_for_package_state(self, mapping_package_state: MappingPackageState) -> 'CMExporter':
if not isinstance(mapping_package_state, MappingPackageState):
raise CMExporterException()
self.cm_tables[self.metadata_table_name] = self.generate_metadata_table(mapping_package_state)
- self.cm_tables[self.rules_table_name] = self.generate_rules_table(mapping_package_state)
- self.cm_tables[self.mapping_groups_table_name] = self.generate_mapping_groups_table(mapping_package_state)
- self.cm_tables[self.resources_table_name] = self.generate_resources_table(mapping_package_state)
+ self.cm_tables[self.rules_table_name] = self.generate_rules_table(
+ mapping_package_state.conceptual_mapping_rules)
+ self.cm_tables[self.mapping_groups_table_name] = self.generate_mapping_groups_table(
+ mapping_package_state.mapping_groups
+ )
+ self.cm_tables[self.resources_table_name] = self.generate_resources_table(
+ mapping_package_state.resources
+ )
+
+ return self
+
+ async def export_for_project(self, project: Project) -> 'CMExporter':
+ if not isinstance(project, Project):
+ raise CMExporterException()
+ self.cm_tables[self.rules_table_name] = self.generate_rules_table(
+ await get_conceptual_mapping_rules_with_data_for_project_and_package(project.id)
+ )
+ self.cm_tables[self.mapping_groups_table_name] = self.generate_mapping_groups_table(
+ await get_mapping_groups_for_project(project.id)
+ )
+ self.cm_tables[self.resources_table_name] = self.generate_resources_table(
+ await get_resource_files_for_project(project_id=project.id)
+ )
return self
@@ -165,6 +190,8 @@ def fetch_excel(self) -> bytes:
with pd.ExcelWriter(output_bytes, engine='xlsxwriter') as excel_writer:
for table_name, table in self.cm_tables.items():
+ if table is None:
+ continue
table.to_excel(excel_writer, sheet_name=table_name, index=False)
output_bytes.seek(0)
diff --git a/mapping_workbench/backend/package_exporter/adapters/eforms/package_state_exporter.py b/mapping_workbench/backend/package_exporter/adapters/eforms/package_state_exporter.py
index 0b4dcc928..e0cda4ffb 100644
--- a/mapping_workbench/backend/package_exporter/adapters/eforms/package_state_exporter.py
+++ b/mapping_workbench/backend/package_exporter/adapters/eforms/package_state_exporter.py
@@ -6,6 +6,7 @@
import pandas as pd
from mapping_workbench.backend.core.adapters.archiver import ZipArchiver, ARCHIVE_ZIP_FORMAT
+from mapping_workbench.backend.core.adapters.exporter import ArchiveExporter
from mapping_workbench.backend.mapping_package.models.entity import MappingPackageState
from mapping_workbench.backend.package_exporter.adapters.mapping_package_hasher import MappingPackageHasher
from mapping_workbench.backend.package_exporter.adapters.mapping_package_reporter import MappingPackageReporter
@@ -23,7 +24,7 @@
from mapping_workbench.backend.user.models.user import User
-class PackageStateExporter:
+class PackageStateExporter(ArchiveExporter):
package_state: MappingPackageState
def __init__(self, package_state: MappingPackageState, project: Project, user: User = None):
@@ -68,14 +69,6 @@ async def export(self) -> bytes:
with open(self.archive_file_path, 'rb') as zip_file:
return zip_file.read()
- @classmethod
- def write_to_file(cls, file_path: Path, file_content: str):
- file_path.write_text(file_content, encoding="utf-8")
-
- @classmethod
- def create_dir(cls, path: Path):
- path.mkdir(parents=True, exist_ok=True)
-
def create_dirs(self):
self.create_dir(self.package_path)
self.create_dir(self.archive_path)
diff --git a/mapping_workbench/backend/package_exporter/services/export_conceptual_mapping.py b/mapping_workbench/backend/package_exporter/services/export_conceptual_mapping.py
index 934db71e0..fdf6ed1c9 100644
--- a/mapping_workbench/backend/package_exporter/services/export_conceptual_mapping.py
+++ b/mapping_workbench/backend/package_exporter/services/export_conceptual_mapping.py
@@ -4,15 +4,22 @@
ConceptualMappingRule
from mapping_workbench.backend.mapping_package.models.entity import MappingPackage, MappingPackageState
from mapping_workbench.backend.package_exporter.adapters.cm_exporter import EFormsCMExporter
+from mapping_workbench.backend.project.models.entity import Project
async def generate_eforms_conceptual_mapping_excel_by_mapping_package_state(
mapping_package_state: MappingPackageState) -> bytes:
cm_exporter = EFormsCMExporter()
- result_excel = cm_exporter.export(mapping_package_state).fetch_excel()
+ result_excel = cm_exporter.export_for_package_state(mapping_package_state).fetch_excel()
return result_excel
async def generate_eforms_conceptual_mapping_excel_by_mapping_package(mapping_package: MappingPackage) -> bytes:
mapping_package_state: MappingPackageState = await mapping_package.get_state()
return await generate_eforms_conceptual_mapping_excel_by_mapping_package_state(mapping_package_state)
+
+
+async def generate_conceptual_mapping_excel_by_project(project: Project) -> bytes:
+ cm_exporter = EFormsCMExporter()
+ result_excel = (await cm_exporter.export_for_project(project)).fetch_excel()
+ return result_excel
diff --git a/mapping_workbench/backend/package_importer/adapters/eforms/importer.py b/mapping_workbench/backend/package_importer/adapters/eforms/importer.py
index 9e263afed..4488ced19 100644
--- a/mapping_workbench/backend/package_importer/adapters/eforms/importer.py
+++ b/mapping_workbench/backend/package_importer/adapters/eforms/importer.py
@@ -50,7 +50,7 @@ async def add_mapping_groups_from_mono(self, mono_package: ImportedMappingSuite)
if not group:
group = MappingGroup(name=mono_group.mapping_group_id)
- group.project = self.project
+ group.project = self.project_link
group.class_uri = mono_group.ontology_class
group.iterator_xpath = mono_group.iterator_xpath
@@ -89,7 +89,7 @@ async def add_mapping_rules_from_mono(self, mono_package: ImportedMappingSuite):
rule: ConceptualMappingRule = await ConceptualMappingRule.find_one(
ConceptualMappingRule.source_structural_element == StructuralElement.link_from_id(
source_structural_element.id),
- ConceptualMappingRule.project == Project.link_from_id(self.project.id),
+ ConceptualMappingRule.project == self.project_link,
ConceptualMappingRule.target_class_path == mono_rule.class_path,
ConceptualMappingRule.target_property_path == mono_rule.property_path
)
@@ -103,7 +103,7 @@ async def add_mapping_rules_from_mono(self, mono_package: ImportedMappingSuite):
)
# rule: ConceptualMappingRule = ConceptualMappingRule()
- rule.project = self.project
+ rule.project = self.project_link
if source_structural_element:
rule.source_structural_element = source_structural_element
diff --git a/mapping_workbench/backend/package_importer/adapters/importer_abc.py b/mapping_workbench/backend/package_importer/adapters/importer_abc.py
index 6fc56b323..0c76d7a46 100644
--- a/mapping_workbench/backend/package_importer/adapters/importer_abc.py
+++ b/mapping_workbench/backend/package_importer/adapters/importer_abc.py
@@ -350,7 +350,7 @@ async def add_mapping_package_from_mono(self, mono_package: ImportedMappingSuite
else:
package = MappingPackage(**metadata)
- package.project = self.project
+ package.project = self.project_link
package.test_data_suites = []
package.shacl_test_suites = []
package.sparql_test_suites = []
diff --git a/mapping_workbench/backend/package_importer/adapters/standard/importer.py b/mapping_workbench/backend/package_importer/adapters/standard/importer.py
index 82f3b0855..96fbafb1d 100644
--- a/mapping_workbench/backend/package_importer/adapters/standard/importer.py
+++ b/mapping_workbench/backend/package_importer/adapters/standard/importer.py
@@ -77,7 +77,7 @@ async def add_mapping_rules_from_mono(self, mono_package: ImportedMappingSuite):
source_structural_element=StructuralElement.link_from_id(source_structural_element.id)
)
- rule.project = self.project
+ rule.project = self.project_link
if source_structural_element:
rule.source_structural_element = source_structural_element
diff --git a/mapping_workbench/backend/package_importer/services/import_mapping_suite.py b/mapping_workbench/backend/package_importer/services/import_mapping_suite.py
index 22fdba18a..4ec1a701f 100644
--- a/mapping_workbench/backend/package_importer/services/import_mapping_suite.py
+++ b/mapping_workbench/backend/package_importer/services/import_mapping_suite.py
@@ -38,9 +38,10 @@ async def import_mapping_package(
)
package: MappingPackage = await importer.import_from_mono_mapping_suite(monolith_mapping_suite)
- task_response.update_result(TaskResultData(
- warnings=importer.warnings
- ))
+ if task_response:
+ task_response.update_result(TaskResultData(
+ warnings=importer.warnings
+ ))
return ImportedMappingSuiteResponse(
mapping_package=package
diff --git a/mapping_workbench/backend/project/adapters/source_files_exporter.py b/mapping_workbench/backend/project/adapters/source_files_exporter.py
new file mode 100644
index 000000000..ddcd55f82
--- /dev/null
+++ b/mapping_workbench/backend/project/adapters/source_files_exporter.py
@@ -0,0 +1,121 @@
+import tempfile
+from pathlib import Path
+
+from mapping_workbench.backend.core.adapters.archiver import ZipArchiver, ARCHIVE_ZIP_FORMAT
+from mapping_workbench.backend.core.adapters.exporter import ArchiveExporter
+from mapping_workbench.backend.package_exporter.services.export_conceptual_mapping import \
+    generate_conceptual_mapping_excel_by_project
+from mapping_workbench.backend.package_importer.services.import_mono_mapping_suite import TEST_DATA_DIR_NAME, \
+    TRANSFORMATION_DIR_NAME, TRANSFORMATION_MAPPINGS_DIR_NAME, TRANSFORMATION_RESOURCES_DIR_NAME, VALIDATION_DIR_NAME, \
+    SHACL_VALIDATION_DIR_NAME, SPARQL_VALIDATION_DIR_NAME, CONCEPTUAL_MAPPINGS_FILE_NAME
+from mapping_workbench.backend.project.models.entity import Project
+from mapping_workbench.backend.resource_collection.services.data import get_resource_files_for_project
+from mapping_workbench.backend.shacl_test_suite.services.data import get_shacl_test_suites_for_project, \
+    get_shacl_tests_for_suite
+from mapping_workbench.backend.sparql_test_suite.services.data import get_sparql_test_suites_for_project, \
+    get_sparql_tests_for_suite
+from mapping_workbench.backend.test_data_suite.services.data import get_test_data_suites_for_project, \
+    get_test_datas_for_suite
+from mapping_workbench.backend.triple_map_fragment.services.data import get_triple_map_fragments_for_project
+from mapping_workbench.backend.user.models.user import User
+
+
+class SourceFilesExporter(ArchiveExporter):
+
+ def __init__(self, project: Project, user: User = None):
+ self.project = project
+ self.archiver = ZipArchiver()
+ self.user = user
+
+ self.tempdir = tempfile.TemporaryDirectory()
+ tempdir_name = self.tempdir.name
+ self.tempdir_path = Path(tempdir_name)
+
+ self.project_path = self.tempdir_path / str(self.project.id)
+ self.archive_path = self.tempdir_path / "archive"
+        self.archive_file_path = self.archive_path / f"{self.project.id}.{ARCHIVE_ZIP_FORMAT}"
+
+ self.test_data_path = self.project_path / TEST_DATA_DIR_NAME
+ self.transformation_path = self.project_path / "src" / TRANSFORMATION_DIR_NAME
+ self.transformation_mappings_path = self.transformation_path / TRANSFORMATION_MAPPINGS_DIR_NAME
+ self.transformation_resources_path = self.transformation_path / TRANSFORMATION_RESOURCES_DIR_NAME
+ self.validation_path = self.project_path / "src" / VALIDATION_DIR_NAME
+ self.validation_shacl_path = self.validation_path / SHACL_VALIDATION_DIR_NAME
+ self.validation_sparql_path = self.validation_path / SPARQL_VALIDATION_DIR_NAME
+
+ async def export(self) -> bytes:
+ """
+
+ :return:
+ """
+ self.create_dirs()
+ await self.add_transformation_mappings()
+ await self.add_transformation_resources()
+ await self.add_test_data()
+ await self.add_validation_shacl()
+ await self.add_validation_sparql()
+ await self.add_conceptual_mappings()
+
+ self.archiver.make_archive(self.project_path, self.archive_file_path)
+
+ with open(self.archive_file_path, 'rb') as zip_file:
+ return zip_file.read()
+
+ def create_dirs(self):
+ self.create_dir(self.project_path)
+ self.create_dir(self.archive_path)
+ self.create_dir(self.test_data_path)
+ self.create_dir(self.transformation_path)
+ self.create_dir(self.transformation_mappings_path)
+ self.create_dir(self.transformation_resources_path)
+ self.create_dir(self.validation_path)
+ self.create_dir(self.validation_shacl_path)
+ self.create_dir(self.validation_sparql_path)
+
+ async def add_conceptual_mappings(self):
+ filepath = self.transformation_path / CONCEPTUAL_MAPPINGS_FILE_NAME
+ with open(filepath, 'wb') as f:
+ excel_bytes: bytes = await generate_conceptual_mapping_excel_by_project(self.project)
+ f.write(excel_bytes)
+
+ async def add_transformation_mappings(self):
+ triple_map_fragments = await get_triple_map_fragments_for_project(self.project.id)
+ for triple_map_fragment in triple_map_fragments:
+ filename: str = f"{triple_map_fragment.identifier}.{triple_map_fragment.format.value.lower()}" \
+ if triple_map_fragment.identifier else triple_map_fragment.triple_map_uri
+ self.write_to_file(self.transformation_mappings_path / filename, triple_map_fragment.triple_map_content)
+
+ async def add_transformation_resources(self):
+ resources = await get_resource_files_for_project(project_id=self.project.id)
+ for resource in resources:
+ self.write_to_file(self.transformation_resources_path / (resource.filename or resource.title),
+ resource.content)
+
+ async def add_test_data(self):
+ test_data_suites = await get_test_data_suites_for_project(self.project.id)
+ for test_data_suite in test_data_suites:
+ test_data_suite_path = self.test_data_path / test_data_suite.title
+            self.create_dir(test_data_suite_path)
+ test_datas = await get_test_datas_for_suite(self.project.id, test_data_suite.id)
+ for test_data in test_datas:
+ self.write_to_file(test_data_suite_path / (test_data.filename or test_data.title), test_data.content)
+
+ async def add_validation_shacl(self):
+ shacl_test_suites = await get_shacl_test_suites_for_project(self.project.id)
+ for shacl_test_suite in shacl_test_suites:
+ shacl_test_suite_path = self.validation_shacl_path / shacl_test_suite.title
+            self.create_dir(shacl_test_suite_path)
+ shacl_tests = await get_shacl_tests_for_suite(self.project.id, shacl_test_suite.id)
+ for shacl_test in shacl_tests:
+ self.write_to_file(shacl_test_suite_path / (shacl_test.filename or shacl_test.title),
+ shacl_test.content)
+
+ async def add_validation_sparql(self):
+ sparql_test_suites = await get_sparql_test_suites_for_project(self.project.id)
+ for sparql_test_suite in sparql_test_suites:
+ sparql_test_suite_path = self.validation_sparql_path / sparql_test_suite.title
+            self.create_dir(sparql_test_suite_path)
+ sparql_tests = await get_sparql_tests_for_suite(self.project.id, sparql_test_suite.id)
+ for sparql_test in sparql_tests:
+ self.write_to_file(sparql_test_suite_path / (sparql_test.filename or sparql_test.title),
+ sparql_test.content)
diff --git a/mapping_workbench/backend/project/entrypoints/api/routes.py b/mapping_workbench/backend/project/entrypoints/api/routes.py
index 7af54535b..c2cf26ce3 100644
--- a/mapping_workbench/backend/project/entrypoints/api/routes.py
+++ b/mapping_workbench/backend/project/entrypoints/api/routes.py
@@ -1,6 +1,10 @@
+import io
+
from fastapi import APIRouter, Depends, status
+from starlette.responses import StreamingResponse
from mapping_workbench.backend.core.models.api_response import APIEmptyContentWithIdResponse
+from mapping_workbench.backend.core.services.exceptions import ResourceNotFoundException
from mapping_workbench.backend.project.models.entity import ProjectOut, ProjectCreateIn, ProjectUpdateIn, Project
from mapping_workbench.backend.project.models.entity_api_response import APIListProjectsPaginatedResponse
from mapping_workbench.backend.project.services.api import (
@@ -11,6 +15,7 @@
get_project_out,
delete_project
)
+from mapping_workbench.backend.project.services.export_source_files import export_source_files
from mapping_workbench.backend.project.services.tasks import add_task_remove_project_orphan_shareable_resources
from mapping_workbench.backend.security.services.user_manager import current_active_user
from mapping_workbench.backend.user.models.user import User
@@ -108,3 +113,23 @@ async def route_cleanup_project(
user: User = Depends(current_active_user)
):
return add_task_remove_project_orphan_shareable_resources(project.id, user.email).task_metadata
+
+
+@router.post(
+ "/{id}/export_source_files",
+ description=f"Export {NAME_FOR_ONE} source files",
+ name=f"{NAME_FOR_ONE}:export_source_files",
+ status_code=status.HTTP_200_OK
+)
+async def route_export_source_files(
+ project: Project = Depends(get_project),
+):
+ try:
+ archive: bytes = await export_source_files(project)
+ except ResourceNotFoundException as http_exception:
+        raise
+
+ return StreamingResponse(
+ io.BytesIO(archive),
+ media_type="application/x-zip-compressed"
+ )
diff --git a/mapping_workbench/backend/project/services/export_source_files.py b/mapping_workbench/backend/project/services/export_source_files.py
new file mode 100644
index 000000000..ca9e6b9cc
--- /dev/null
+++ b/mapping_workbench/backend/project/services/export_source_files.py
@@ -0,0 +1,16 @@
+from mapping_workbench.backend.project.adapters.source_files_exporter import SourceFilesExporter
+from mapping_workbench.backend.project.models.entity import Project
+
+
+async def export_source_files(project: Project) -> bytes:
+ """
+
+ :param project:
+ :return:
+ """
+
+ exporter: SourceFilesExporter = SourceFilesExporter(
+ project=project
+ )
+
+    return await exporter.export()
diff --git a/mapping_workbench/backend/shacl_test_suite/services/data.py b/mapping_workbench/backend/shacl_test_suite/services/data.py
index 16dfa889b..2cfaacc6c 100644
--- a/mapping_workbench/backend/shacl_test_suite/services/data.py
+++ b/mapping_workbench/backend/shacl_test_suite/services/data.py
@@ -1 +1,26 @@
-SHACL_CM_RULES_SUITE_TITLE = "cm_shacl_shapes"
\ No newline at end of file
+from typing import List
+
+from beanie import PydanticObjectId
+from beanie.odm.operators.find.comparison import Eq
+
+from mapping_workbench.backend.project.models.entity import Project
+from mapping_workbench.backend.shacl_test_suite.models.entity import SHACLTestSuite, SHACLTestFileResource
+
+SHACL_CM_RULES_SUITE_TITLE = "cm_shacl_shapes"
+
+async def get_shacl_test_suites_for_project(project_id: PydanticObjectId) -> \
+ List[SHACLTestSuite]:
+ items: List[SHACLTestSuite] = await SHACLTestSuite.find(
+ SHACLTestSuite.project == Project.link_from_id(project_id)
+ ).to_list()
+
+ return items
+
+async def get_shacl_tests_for_suite(project_id: PydanticObjectId, suite_id: PydanticObjectId) -> \
+ List[SHACLTestFileResource]:
+ items: List[SHACLTestFileResource] = await SHACLTestFileResource.find(
+ SHACLTestFileResource.project == Project.link_from_id(project_id),
+ Eq(SHACLTestFileResource.shacl_test_suite, SHACLTestSuite.link_from_id(suite_id))
+ ).to_list()
+
+    return items
diff --git a/mapping_workbench/backend/sparql_test_suite/services/data.py b/mapping_workbench/backend/sparql_test_suite/services/data.py
index fafd4e75e..af22b9fda 100644
--- a/mapping_workbench/backend/sparql_test_suite/services/data.py
+++ b/mapping_workbench/backend/sparql_test_suite/services/data.py
@@ -1,2 +1,28 @@
+from typing import List
+
+from beanie import PydanticObjectId
+from beanie.odm.operators.find.comparison import Eq
+
+from mapping_workbench.backend.project.models.entity import Project
+from mapping_workbench.backend.sparql_test_suite.models.entity import SPARQLTestSuite, SPARQLTestFileResource
+
SPARQL_CM_ASSERTIONS_SUITE_TITLE = "cm_assertions"
SPARQL_INTEGRATION_TESTS_SUITE_TITLE = "integration_tests"
+
+
+async def get_sparql_test_suites_for_project(project_id: PydanticObjectId) -> \
+ List[SPARQLTestSuite]:
+ items: List[SPARQLTestSuite] = await SPARQLTestSuite.find(
+ SPARQLTestSuite.project == Project.link_from_id(project_id)
+ ).to_list()
+
+ return items
+
+async def get_sparql_tests_for_suite(project_id: PydanticObjectId, suite_id: PydanticObjectId) -> \
+ List[SPARQLTestFileResource]:
+ items: List[SPARQLTestFileResource] = await SPARQLTestFileResource.find(
+ SPARQLTestFileResource.project == Project.link_from_id(project_id),
+ Eq(SPARQLTestFileResource.sparql_test_suite, SPARQLTestSuite.link_from_id(suite_id))
+ ).to_list()
+
+ return items
\ No newline at end of file
diff --git a/mapping_workbench/backend/task_manager/adapters/task_progress.py b/mapping_workbench/backend/task_manager/adapters/task_progress.py
index 1b928c321..5f6ca636b 100644
--- a/mapping_workbench/backend/task_manager/adapters/task_progress.py
+++ b/mapping_workbench/backend/task_manager/adapters/task_progress.py
@@ -43,10 +43,11 @@ def start_action(self, name: str = None, steps_count: int = 0):
def finish_current_action(self):
self.update_current_action_status(TaskProgressStatus.FINISHED)
- self.get_current_action().finished_at = self.current_time()
- self.get_current_action().duration = (
- self.get_current_action().finished_at - self.get_current_action().started_at
- )
+ if self.get_current_action():
+ self.get_current_action().finished_at = self.current_time()
+ self.get_current_action().duration = (
+ self.get_current_action().finished_at - self.get_current_action().started_at
+ )
self.update_task_response()
def add_action(self, action: TaskProgressAction):
@@ -63,8 +64,9 @@ def get_current_action(self) -> TaskProgressAction:
return self.current_action
def update_current_action_status(self, status: TaskProgressStatus):
- self.get_current_action().status = status
- self.update_task_response()
+ if self.get_current_action():
+ self.get_current_action().status = status
+ self.update_task_response()
def start_action_step(self, name: str = None):
self.add_action_step(TaskProgressActionStep(
@@ -76,10 +78,11 @@ def start_action_step(self, name: str = None):
def finish_current_action_step(self):
self.update_current_action_step_status(TaskProgressStatus.FINISHED)
- self.get_current_action_step().finished_at = self.current_time()
- self.get_current_action_step().duration = (
- self.get_current_action_step().finished_at - self.get_current_action_step().started_at
- )
+ if self.get_current_action_step():
+ self.get_current_action_step().finished_at = self.current_time()
+ self.get_current_action_step().duration = (
+ self.get_current_action_step().finished_at - self.get_current_action_step().started_at
+ )
self.update_task_response()
def add_action_step(self, step: TaskProgressActionStep):
@@ -96,8 +99,10 @@ def get_current_action_step(self) -> TaskProgressActionStep:
return self.current_action_step
def update_current_action_step_status(self, status: TaskProgressStatus):
- self.get_current_action_step().status = status
- self.update_task_response()
+ if self.get_current_action_step():
+ self.get_current_action_step().status = status
+ self.update_task_response()
def update_task_response(self):
- self.task_response.update_progress(self.progress)
+ if self.task_response:
+ self.task_response.update_progress(self.progress)
diff --git a/mapping_workbench/backend/test_data_suite/services/data.py b/mapping_workbench/backend/test_data_suite/services/data.py
index 7c6d928bc..e5000e4b2 100644
--- a/mapping_workbench/backend/test_data_suite/services/data.py
+++ b/mapping_workbench/backend/test_data_suite/services/data.py
@@ -10,6 +10,25 @@
TestDataFileResourceFormat
+async def get_test_data_suites_for_project(project_id: PydanticObjectId) -> \
+ List[TestDataSuite]:
+ items: List[TestDataSuite] = await TestDataSuite.find(
+ TestDataSuite.project == Project.link_from_id(project_id)
+ ).to_list()
+
+ return items
+
+
+async def get_test_datas_for_suite(project_id: PydanticObjectId, suite_id: PydanticObjectId) -> \
+ List[TestDataFileResource]:
+ items: List[TestDataFileResource] = await TestDataFileResource.find(
+ TestDataFileResource.project == Project.link_from_id(project_id),
+ Eq(TestDataFileResource.test_data_suite, TestDataSuite.link_from_id(suite_id))
+ ).to_list()
+
+ return items
+
+
async def get_test_data_file_resources_for_project(project_id: PydanticObjectId) -> \
List[TestDataFileResource]:
items: List[TestDataFileResource] = await TestDataFileResource.find(
diff --git a/mapping_workbench/backend/test_data_suite/services/transform_test_data.py b/mapping_workbench/backend/test_data_suite/services/transform_test_data.py
index 9d0d8cef0..837147f88 100644
--- a/mapping_workbench/backend/test_data_suite/services/transform_test_data.py
+++ b/mapping_workbench/backend/test_data_suite/services/transform_test_data.py
@@ -20,7 +20,7 @@
from mapping_workbench.backend.triple_map_fragment.models.entity import TripleMapFragment, TripleMapFragmentState, \
TripleMapFragmentABC
from mapping_workbench.backend.triple_map_fragment.services.data_for_generic import \
- get_generic_triple_map_fragments_for_project_package
+ get_generic_triple_map_fragments_for_project_package, get_generic_triple_map_fragments_for_project
from mapping_workbench.backend.triple_map_fragment.services.data_for_specific import \
get_specific_triple_map_fragments_for_package
from mapping_workbench.backend.user.models.user import User
@@ -70,9 +70,8 @@ async def get_mappings_to_transform_test_data(
project_id: PydanticObjectId,
package_id: PydanticObjectId = None
):
- mappings = []
-
specific_mappings = []
+
if package_id is not None:
generic_mappings = await get_generic_triple_map_fragments_for_project_package(
project_id=project_id,
@@ -82,6 +81,11 @@ async def get_mappings_to_transform_test_data(
project_id=project_id,
package_id=package_id
)
+ else:
+ generic_mappings = await get_generic_triple_map_fragments_for_project(project_id=project_id)
+
+ mappings = generic_mappings + specific_mappings
+
return mappings
diff --git a/mapping_workbench/backend/triple_map_fragment/services/api_for_generic.py b/mapping_workbench/backend/triple_map_fragment/services/api_for_generic.py
index e885c29d7..c93f3e0bf 100644
--- a/mapping_workbench/backend/triple_map_fragment/services/api_for_generic.py
+++ b/mapping_workbench/backend/triple_map_fragment/services/api_for_generic.py
@@ -56,7 +56,6 @@ async def update_generic_triple_map_fragment(
PydanticObjectId(package_id) for package_id in
update_data[GenericTripleMapFragment.refers_to_mapping_package_ids]
]
- print(update_data)
return GenericTripleMapFragmentOut(**(
await generic_triple_map_fragment.set(update_data)
).model_dump())
diff --git a/mapping_workbench/backend/triple_map_fragment/services/data.py b/mapping_workbench/backend/triple_map_fragment/services/data.py
new file mode 100644
index 000000000..52c69c40b
--- /dev/null
+++ b/mapping_workbench/backend/triple_map_fragment/services/data.py
@@ -0,0 +1,18 @@
+from typing import List
+
+from beanie import PydanticObjectId
+
+from mapping_workbench.backend.triple_map_fragment.models.entity import TripleMapFragment, GenericTripleMapFragment
+from mapping_workbench.backend.triple_map_fragment.services.data_for_generic import \
+ get_generic_triple_map_fragments_for_project
+from mapping_workbench.backend.triple_map_fragment.services.data_for_specific import \
+ get_specific_triple_map_fragments_for_project
+
+
+async def get_triple_map_fragments_for_project(
+ project_id: PydanticObjectId,
+) -> List[TripleMapFragment]:
+ return (
+ await get_generic_triple_map_fragments_for_project(project_id)
+ + await get_specific_triple_map_fragments_for_project(project_id)
+ )
diff --git a/mapping_workbench/backend/triple_map_fragment/services/data_for_generic.py b/mapping_workbench/backend/triple_map_fragment/services/data_for_generic.py
index b24e0151a..d808a9b9f 100644
--- a/mapping_workbench/backend/triple_map_fragment/services/data_for_generic.py
+++ b/mapping_workbench/backend/triple_map_fragment/services/data_for_generic.py
@@ -17,3 +17,13 @@ async def get_generic_triple_map_fragments_for_project_package(
).to_list()
return items
+
+
+async def get_generic_triple_map_fragments_for_project(
+ project_id: PydanticObjectId
+) -> List[GenericTripleMapFragment]:
+ items: List[GenericTripleMapFragment] = await GenericTripleMapFragment.find(
+ GenericTripleMapFragment.project == Project.link_from_id(project_id)
+ ).to_list()
+
+ return items
diff --git a/mapping_workbench/backend/triple_map_fragment/services/data_for_specific.py b/mapping_workbench/backend/triple_map_fragment/services/data_for_specific.py
index 2650dee88..ca9ac2e54 100644
--- a/mapping_workbench/backend/triple_map_fragment/services/data_for_specific.py
+++ b/mapping_workbench/backend/triple_map_fragment/services/data_for_specific.py
@@ -14,3 +14,12 @@ async def get_specific_triple_map_fragments_for_package(project_id: PydanticObje
).to_list()
return items
+
+
+async def get_specific_triple_map_fragments_for_project(project_id: PydanticObjectId) -> \
+ List[SpecificTripleMapFragment]:
+ items: List[SpecificTripleMapFragment] = await SpecificTripleMapFragment.find(
+ SpecificTripleMapFragment.project == Project.link_from_id(project_id)
+ ).to_list()
+
+ return items
diff --git a/mapping_workbench/frontend/src/api/app/index.js b/mapping_workbench/frontend/src/api/app/index.js
index 24a2f7530..67beea417 100644
--- a/mapping_workbench/frontend/src/api/app/index.js
+++ b/mapping_workbench/frontend/src/api/app/index.js
@@ -155,8 +155,8 @@ class AppApi {
return this.request(METHOD.GET, endpoint, null, params, headers, extraConfig);
}
- async post(endpoint, data = {}, params = null, headers = null) {
- return this.request(METHOD.POST, endpoint, data, params, headers);
+ async post(endpoint, data = {}, params = null, headers = null, extraConfig = null) {
+ return this.request(METHOD.POST, endpoint, data, params, headers, extraConfig);
}
async create(endpoint, data, headers = null) {
diff --git a/mapping_workbench/frontend/src/api/mapping-packages/index.js b/mapping_workbench/frontend/src/api/mapping-packages/index.js
index b1fce821a..4bee6cdc2 100644
--- a/mapping_workbench/frontend/src/api/mapping-packages/index.js
+++ b/mapping_workbench/frontend/src/api/mapping-packages/index.js
@@ -65,7 +65,7 @@ class MappingPackagesApi extends SectionApi {
async createDefault(projectId) {
try {
let endpoint = this.paths['create_default'];
- return await appApi.post(endpoint, {}, {project_id: projectId});
+ return await appApi.post(endpoint, {}, {project_id: projectId});
} catch (err) {
}
}
@@ -77,7 +77,6 @@ class MappingPackagesApi extends SectionApi {
return appApi.get(endpoint, params, headers, {
responseType: 'blob'
});
-
}
getLatestState(package_id) {
diff --git a/mapping_workbench/frontend/src/api/projects/index.js b/mapping_workbench/frontend/src/api/projects/index.js
index 298d3bc29..b55e4d5ad 100644
--- a/mapping_workbench/frontend/src/api/projects/index.js
+++ b/mapping_workbench/frontend/src/api/projects/index.js
@@ -1,5 +1,6 @@
import {SectionApi} from "../section";
import {appApi} from "../app";
+import {sessionApi} from "../session";
export const SESSION_PROJECT_KEY = 'sessionProject';
@@ -27,6 +28,13 @@ class ProjectsApi extends SectionApi {
let endpoint = this.paths['cleanup'].replace(':id', project_id);
return await appApi.post(endpoint);
}
+
+ async exportSourceFiles() {
+ let endpoint = this.paths['export_source_files'].replace(':id', sessionApi.getSessionProject());
+ return appApi.post(endpoint, {}, {}, {}, {
+ responseType: 'blob'
+ });
+ }
}
export const projectsApi = new ProjectsApi();
diff --git a/mapping_workbench/frontend/src/pages/app/mapping-packages/index.js b/mapping_workbench/frontend/src/pages/app/mapping-packages/index.js
index d08427790..87cd0331b 100644
--- a/mapping_workbench/frontend/src/pages/app/mapping-packages/index.js
+++ b/mapping_workbench/frontend/src/pages/app/mapping-packages/index.js
@@ -2,6 +2,8 @@ import {useEffect, useState} from 'react';
import AddIcon from '@mui/icons-material/Add';
import UploadIcon from '@mui/icons-material/Upload';
+import DownloadIcon from '@mui/icons-material/Download';
+import XIcon from '@untitled-ui/icons-react/build/esm/X';
import Link from '@mui/material/Link';
import Card from '@mui/material/Card';
@@ -22,7 +24,13 @@ import {ListTable} from "src/sections/app/mapping-package/list-table";
import {TableSearchBar} from "src/sections/components/table-search-bar";
import {BreadcrumbsSeparator} from 'src/components/breadcrumbs-separator';
import {mappingPackagesApi as sectionApi} from 'src/api/mapping-packages';
+import {projectsApi} from 'src/api/projects';
import {PackageImporter} from 'src/sections/app/mapping-package/package-importer';
+import IconButton from "@mui/material/IconButton";
+import DialogContent from "@mui/material/DialogContent";
+import Dialog from "@mui/material/Dialog";
+import {toastError, toastLoad, toastSuccess} from "../../../components/app-toast";
+import {sessionApi} from "../../../api/session";
const useItemsStore = () => {
const [state, setState] = useState({
@@ -56,6 +64,19 @@ const Page = () => {
const itemsSearch = useItemsSearch(itemsStore.items, sectionApi, ['title', 'identifier']);
const importDialog = useDialog();
+ const srcExportDialog = useDialog();
+
+ const exportSourceFiles = () => {
+ const toastId = toastLoad(`Exporting Source Files ... `)
+ projectsApi.exportSourceFiles()
+ .then(response => {
+ const filename = `src_${sessionApi.getSessionProject()}.zip`;
+ saveAs(new Blob([response], {type: "application/x-zip-compressed"}), filename);
+ toastSuccess(`Source Files successfully exported.`, toastId)
+ }).catch(err => toastError(`Exporting Source Files failed: ${err.message}.`, toastId))
+
+ srcExportDialog.handleClose();
+ }
return (
<>
@@ -92,6 +113,18 @@ const Page = () => {
direction="row"
spacing={3}
>
+