diff --git a/mapping_workbench/backend/file_resource/models/file_resource.py b/mapping_workbench/backend/file_resource/models/file_resource.py
index 87eddb6a5..98707c232 100644
--- a/mapping_workbench/backend/file_resource/models/file_resource.py
+++ b/mapping_workbench/backend/file_resource/models/file_resource.py
@@ -7,9 +7,9 @@
from pydantic_core.core_schema import ValidationInfo
from mapping_workbench.backend.core.models.base_mapping_package_resource_entity import \
- BaseMappingPackageResourceSchemaTrait, BaseMappingPackageResourceEntityInSchema
-from mapping_workbench.backend.core.models.base_project_resource_entity import BaseProjectResourceEntity, \
- BaseProjectResourceEntityInSchema, BaseProjectAbleResourceEntity
+ BaseMappingPackageResourceEntityInSchema, BaseMappingPackagesResourceSchemaTrait
+from mapping_workbench.backend.core.models.base_project_resource_entity import BaseProjectResourceEntityInSchema, \
+ BaseProjectAbleResourceEntity
from mapping_workbench.backend.state_manager.models.state_object import ObjectState
@@ -52,7 +52,7 @@ def set_content(cls, v: str, values: ValidationInfo) -> str:
class FileResource(
BaseProjectAbleResourceEntity,
- BaseMappingPackageResourceSchemaTrait
+ BaseMappingPackagesResourceSchemaTrait
):
title: Optional[str] = None
description: Optional[str] = None
@@ -64,7 +64,7 @@ class FileResource(
class FileResourceCollection(
BaseProjectAbleResourceEntity,
- BaseMappingPackageResourceSchemaTrait
+ BaseMappingPackagesResourceSchemaTrait
):
title: Optional[str] = None
description: Optional[str] = None
diff --git a/mapping_workbench/backend/mapping_package/models/entity.py b/mapping_workbench/backend/mapping_package/models/entity.py
index 21d0662dd..ad1ae7d07 100644
--- a/mapping_workbench/backend/mapping_package/models/entity.py
+++ b/mapping_workbench/backend/mapping_package/models/entity.py
@@ -44,6 +44,7 @@ class MappingPackageIn(BaseProjectResourceEntityInSchema):
start_date: Optional[datetime] = None
end_date: Optional[datetime] = None
eforms_sdk_versions: List[str] = None
+ test_data_suites: Optional[List[Optional[Link[TestDataSuite]]]] = None
shacl_test_suites: Optional[List[Optional[Link[SHACLTestSuite]]]] = None
sparql_test_suites: Optional[List[Optional[Link[SPARQLTestSuite]]]] = None
resource_collections: Optional[List[Optional[Link[ResourceCollection]]]] = None
@@ -71,6 +72,7 @@ class MappingPackageOut(BaseProjectResourceEntityOutSchema):
# start_date: Optional[datetime] = None
# end_date: Optional[datetime] = None
eforms_sdk_versions: List[str] = None
+ test_data_suites: Optional[List[Link[TestDataSuite]]] = None
shacl_test_suites: Optional[List[Link[SHACLTestSuite]]] = None
sparql_test_suites: Optional[List[Link[SPARQLTestSuite]]] = None
resource_collections: Optional[List[Link[ResourceCollection]]] = None
@@ -169,6 +171,7 @@ class MappingPackage(BaseProjectResourceEntity, StatefulObjectABC):
end_date: Optional[str] = None
package_type: Optional[PackageType] = DEFAULT_PACKAGE_TYPE
eforms_sdk_versions: Optional[List[str]] = []
+ test_data_suites: Optional[List[Link[TestDataSuite]]] = None
shacl_test_suites: Optional[List[Link[SHACLTestSuite]]] = None
sparql_test_suites: Optional[List[Link[SPARQLTestSuite]]] = None
resource_collections: Optional[List[Link[ResourceCollection]]] = None
@@ -176,7 +179,7 @@ class MappingPackage(BaseProjectResourceEntity, StatefulObjectABC):
async def get_conceptual_mapping_rules_states(self) -> List[ConceptualMappingRuleState]:
conceptual_mapping_rules = await ConceptualMappingRule.find(
Eq(ConceptualMappingRule.refers_to_mapping_package_ids, self.id),
- Eq(ConceptualMappingRule.project, self.project.to_ref())
+ Eq(ConceptualMappingRule.project, self.project)
).to_list()
conceptual_mapping_rule_states = [
await conceptual_mapping_rule.get_state() for conceptual_mapping_rule in conceptual_mapping_rules
@@ -185,7 +188,7 @@ async def get_conceptual_mapping_rules_states(self) -> List[ConceptualMappingRul
async def get_mapping_groups_states(self) -> List[MappingGroupState]:
mapping_groups = await MappingGroup.find(
- Eq(MappingGroup.project, self.project.to_ref())
+ Eq(MappingGroup.project, self.project)
).to_list()
mapping_groups_states = [
await mapping_group.get_state() for mapping_group in mapping_groups
@@ -194,11 +197,18 @@ async def get_mapping_groups_states(self) -> List[MappingGroupState]:
return mapping_groups_states
async def get_test_data_suites_states(self) -> List[TestDataSuiteState]:
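+        # Test data suites are linked from the package side (self.test_data_suites), so resolve their ids first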
+ test_data_suites_states = []
+ test_data_suites_ids = []
+ if self.test_data_suites:
+ test_data_suites_ids = [test_data_suite.to_ref().id for test_data_suite in self.test_data_suites]
test_data_suites = await TestDataSuite.find(
- TestDataSuite.mapping_package_id == self.id,
- Eq(TestDataSuite.project, self.project.to_ref())
+ In(TestDataSuite.id, test_data_suites_ids),
+ Eq(TestDataSuite.project, self.project)
).to_list()
- test_data_suites_states = [await test_data_suite.get_state() for test_data_suite in test_data_suites]
+ if test_data_suites:
+ for test_data_suite in test_data_suites:
+ test_data_suite_state = await test_data_suite.get_state()
+ test_data_suites_states.append(test_data_suite_state)
return test_data_suites_states
async def get_shacl_test_suites_states(self) -> List[SHACLTestSuiteState]:
@@ -208,7 +218,7 @@ async def get_shacl_test_suites_states(self) -> List[SHACLTestSuiteState]:
shacl_test_suites_ids = [shacl_test_suite.to_ref().id for shacl_test_suite in self.shacl_test_suites]
shacl_test_suites = await SHACLTestSuite.find(
In(SHACLTestSuite.id, shacl_test_suites_ids),
- Eq(SHACLTestSuite.project, self.project.to_ref())
+ Eq(SHACLTestSuite.project, self.project)
).to_list()
if shacl_test_suites:
for shacl_test_suite in shacl_test_suites:
@@ -227,7 +237,7 @@ async def get_sparql_test_suites_states(
sparql_test_suites = await SPARQLTestSuite.find(
In(SPARQLTestSuite.id, sparql_test_suites_ids),
NE(SPARQLTestSuite.type, SPARQLQueryValidationType.CM_ASSERTION),
- Eq(SPARQLTestSuite.project, self.project.to_ref())
+ Eq(SPARQLTestSuite.project, self.project)
).to_list()
if sparql_test_suites:
for sparql_test_suite in sparql_test_suites:
@@ -253,10 +263,11 @@ async def get_sparql_test_suites_states(
async def get_triple_map_fragments_states(self) -> List[TripleMapFragmentState]:
triple_map_fragments: List[TripleMapFragment] = (await GenericTripleMapFragment.find(
- Eq(GenericTripleMapFragment.project, self.project.to_ref())
+ Eq(GenericTripleMapFragment.refers_to_mapping_package_ids, self.id),
+ Eq(GenericTripleMapFragment.project, self.project)
).to_list()) + (await SpecificTripleMapFragment.find(
SpecificTripleMapFragment.mapping_package_id == self.id,
- Eq(TestDataSuite.project, self.project.to_ref())
+            Eq(SpecificTripleMapFragment.project, self.project)
).to_list())
return [await triple_map_fragment.get_state() for triple_map_fragment in triple_map_fragments]
@@ -270,7 +281,7 @@ async def get_resources_states(self) -> List[ResourceFileState]:
]
resource_collections = await ResourceCollection.find(
In(ResourceCollection.id, resource_collections_ids),
- Eq(ResourceCollection.project, self.project.to_ref())
+ Eq(ResourceCollection.project, self.project)
).to_list()
if resource_collections:
for resource_collection in resource_collections:
@@ -280,14 +291,14 @@ async def get_resources_states(self) -> List[ResourceFileState]:
async def get_terms_states(self) -> List[TermState]:
terms = await Term.find(
- Eq(Term.project, self.project.to_ref())
+ Eq(Term.project, self.project)
).to_list()
terms_states = [await term.get_state() for term in terms]
return terms_states
async def get_namespaces_states(self) -> List[NamespaceState]:
namespaces = await Namespace.find(
- Eq(Namespace.project, self.project.to_ref())
+ Eq(Namespace.project, self.project)
).to_list()
namespaces_states = [await namespace.get_state() for namespace in namespaces]
return namespaces_states
diff --git a/mapping_workbench/backend/mapping_package/services/api.py b/mapping_workbench/backend/mapping_package/services/api.py
index 66f5ef59e..9859c6501 100644
--- a/mapping_workbench/backend/mapping_package/services/api.py
+++ b/mapping_workbench/backend/mapping_package/services/api.py
@@ -13,8 +13,8 @@
from mapping_workbench.backend.mapping_package.models.entity import MappingPackage, MappingPackageCreateIn, \
MappingPackageUpdateIn, MappingPackageOut, MappingPackageStateGate
from mapping_workbench.backend.state_manager.services.object_state_manager import delete_object_state
-from mapping_workbench.backend.test_data_suite.models.entity import TestDataFileResource, TestDataSuite
-from mapping_workbench.backend.triple_map_fragment.models.entity import SpecificTripleMapFragment
+from mapping_workbench.backend.triple_map_fragment.models.entity import SpecificTripleMapFragment, \
+ GenericTripleMapFragment
from mapping_workbench.backend.user.models.user import User
@@ -103,23 +103,17 @@ async def remove_mapping_package_resources(mapping_package: MappingPackage):
for resource_type in resources_to_delete:
await delete_mapping_package_resource_by_type(resource_type, project_link, package_id)
- test_data_suites = await TestDataSuite.find(
- TestDataSuite.mapping_package_id == package_id,
- TestDataSuite.project == project_link
- ).to_list()
- if test_data_suites:
- for test_data_suite in test_data_suites:
- await TestDataFileResource.find(
- TestDataFileResource.test_data_suite == TestDataSuite.link_from_id(test_data_suite.id),
- TestDataFileResource.project == project_link
- ).delete()
- await test_data_suite.delete()
-
await ConceptualMappingRule.find(
ConceptualMappingRule.project == project_link
).update_many(
Pull({ConceptualMappingRule.refers_to_mapping_package_ids: package_id})
)
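+    # Detach the removed package from any generic triple map fragments that still reference it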
+ await GenericTripleMapFragment.find(
+ GenericTripleMapFragment.project == project_link
+ ).update_many(
+ Pull({GenericTripleMapFragment.refers_to_mapping_package_ids: package_id})
+ )
# Mapping Package States
diff --git a/mapping_workbench/backend/mapping_package/services/link.py b/mapping_workbench/backend/mapping_package/services/link.py
index 3cf948a59..53de45892 100644
--- a/mapping_workbench/backend/mapping_package/services/link.py
+++ b/mapping_workbench/backend/mapping_package/services/link.py
@@ -3,53 +3,79 @@
from beanie import PydanticObjectId
from beanie.odm.operators.find.comparison import In
+from beanie.odm.operators.update.array import Pull
+from beanie.odm.operators.update.general import Set
from mapping_workbench.backend.mapping_package.models.entity import MappingPackage
from mapping_workbench.backend.project.models.entity import Project
+from mapping_workbench.backend.resource_collection.models.entity import ResourceCollection
+from mapping_workbench.backend.shacl_test_suite.models.entity import SHACLTestSuite
+from mapping_workbench.backend.sparql_test_suite.models.entity import SPARQLTestSuite
+from mapping_workbench.backend.test_data_suite.models.entity import TestDataSuite
class ResourceField(Enum):
SHACL_TEST_SUITES = "shacl_test_suites"
SPARQL_TEST_SUITES = "sparql_test_suites"
RESOURCE_COLLECTIONS = "resource_collections"
+ TEST_DATA_SUITES = "test_data_suites"
# GENERIC_TRIPLE_MAPS = "generic_triple_maps"
- # TEST_DATA_SUITES = "test_data_suites"
-async def assign_mapping_package_to_resources(
+def resource_link_from_id(resources_field: ResourceField, resource_id):
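+    """Build a document Link for the given resource id, based on the resource field type."""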
+ if resources_field == ResourceField.TEST_DATA_SUITES:
+ return TestDataSuite.link_from_id(resource_id)
+ if resources_field == ResourceField.SPARQL_TEST_SUITES:
+ return SPARQLTestSuite.link_from_id(resource_id)
+ if resources_field == ResourceField.SHACL_TEST_SUITES:
+ return SHACLTestSuite.link_from_id(resource_id)
+ if resources_field == ResourceField.RESOURCE_COLLECTIONS:
+ return ResourceCollection.link_from_id(resource_id)
+
+
+async def assign_resources_to_mapping_packages(
project_id: PydanticObjectId,
- mapping_package_id: PydanticObjectId,
- resource_model, # Document
- resources_ids: List[PydanticObjectId]
+ # resource_model: Document,
+ resources_ids: List[PydanticObjectId],
+ resources_field: ResourceField,
+ mapping_packages_ids: List[PydanticObjectId]
):
- await resource_model.get_motor_collection().update_many(
- {
- resource_model.project: Project.link_from_id(project_id).to_ref(),
- resource_model.id: {In.operator: resources_ids}
- },
+ query_filter: dict = {
+ MappingPackage.project: Project.link_from_id(project_id).to_ref(),
+ MappingPackage.id: {In.operator: mapping_packages_ids}
+ }
+
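+    # $set replaces each matched package's existing links for this field with the given resource links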
+ await MappingPackage.get_motor_collection().update_many(
+ query_filter,
{
- "$set": {
- resource_model.mapping_package_id: mapping_package_id
+ Set.operator: {
+ resources_field.value: [
+ resource_link_from_id(resources_field, resource_id).to_ref() for resource_id in resources_ids
+ ]
}
}
)
-async def assign_resources_to_mapping_packages(
+async def unassign_resources_from_mapping_packages(
project_id: PydanticObjectId,
- # resource_model: Document,
resources_ids: List[PydanticObjectId],
resources_field: ResourceField,
- mapping_packages_ids: List[PydanticObjectId]
+ mapping_packages_ids: List[PydanticObjectId] = None
):
+ query_filter: dict = {
+ MappingPackage.project: Project.link_from_id(project_id).to_ref()
+ }
+ if mapping_packages_ids:
+ query_filter[MappingPackage.id] = {In.operator: mapping_packages_ids}
+
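+    # $pull (with $in) removes the given resource links from the field on every matched package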
await MappingPackage.get_motor_collection().update_many(
+ query_filter,
{
- MappingPackage.project: Project.link_from_id(project_id).to_ref(),
- MappingPackage.id: {In.operator: mapping_packages_ids}
- },
- {
- "$set": {
- resources_field.value: resources_ids
+ Pull.operator: {
+ resources_field.value: {In.operator: [
+ resource_link_from_id(resources_field, resource_id).to_ref() for resource_id in resources_ids
+ ]}
}
}
)
diff --git a/mapping_workbench/backend/ontology_file_collection/__init__.py b/mapping_workbench/backend/ontology_file_collection/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/mapping_workbench/backend/ontology_file_collection/entrypoints/api/__init__.py b/mapping_workbench/backend/ontology_file_collection/entrypoints/api/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/mapping_workbench/backend/ontology_file_collection/entrypoints/api/routes.py b/mapping_workbench/backend/ontology_file_collection/entrypoints/api/routes.py
deleted file mode 100644
index 6ce164095..000000000
--- a/mapping_workbench/backend/ontology_file_collection/entrypoints/api/routes.py
+++ /dev/null
@@ -1,203 +0,0 @@
-# from beanie import PydanticObjectId
-# from fastapi import APIRouter, status, Depends
-# from starlette.requests import Request
-#
-# from mapping_workbench.backend.core.models.api_response import APIEmptyContentWithIdResponse
-# from mapping_workbench.backend.file_resource.services.file_resource_form_data import \
-# file_resource_data_from_form_request
-# from mapping_workbench.backend.ontology_file_collection.models.entity import OntologyFileCollection, \
-# OntologyFileResource, OntologyFileResourceCreateIn, OntologyFileResourceUpdateIn
-# from mapping_workbench.backend.ontology_file_collection.models.entity_api_response import \
-# APIListOntologyFileCollectionsPaginatedResponse, APIListOntologyFileResourcesPaginatedResponse
-# from mapping_workbench.backend.ontology_file_collection.services.api import (
-# list_ontology_file_collections,
-# create_ontology_file_collection,
-# update_ontology_file_collection,
-# get_ontology_file_collection,
-# delete_ontology_file_collection,
-# list_ontology_file_collection_file_resources,
-# create_ontology_file_collection_file_resource,
-# update_ontology_file_resource,
-# get_ontology_file_resource,
-# delete_ontology_file_resource
-# )
-# from mapping_workbench.backend.project.models.entity import Project
-# from mapping_workbench.backend.project.services.api import get_project
-# from mapping_workbench.backend.security.services.user_manager import current_active_user
-# from mapping_workbench.backend.user.models.user import User
-#
-# ROUTE_PREFIX = "/ontology_file_collections"
-# TAG = "ontology_file_collections"
-# NAME_FOR_MANY = "ontology_file_collections"
-# NAME_FOR_ONE = "ontology_file_collection"
-# FILE_RESOURCE_NAME_FOR_MANY = "ontology_file_resources"
-# FILE_RESOURCE_NAME_FOR_ONE = "ontology_file_resource"
-#
-# router = APIRouter(
-# prefix=ROUTE_PREFIX,
-# tags=[TAG]
-# )
-#
-#
-# @router.get(
-# "",
-# description=f"List {NAME_FOR_MANY}",
-# name=f"{NAME_FOR_MANY}:list",
-# response_model=APIListOntologyFileCollectionsPaginatedResponse
-# )
-# async def route_list_ontology_file_collections(
-# project: PydanticObjectId = None,
-# page: int = None,
-# limit: int = None,
-# q: str = None
-# ):
-# filters: dict = {}
-# if project:
-# filters['project'] = Project.link_from_id(project)
-# if q is not None:
-# filters['q'] = q
-#
-# await get_project(project)
-#
-# items, total_count = await list_ontology_file_collections(filters, page, limit)
-# return APIListOntologyFileCollectionsPaginatedResponse(
-# items=items,
-# count=total_count
-# )
-#
-#
-# @router.post(
-# "",
-# description=f"Create {NAME_FOR_ONE}",
-# name=f"{NAME_FOR_MANY}:create_{NAME_FOR_ONE}",
-# response_model=OntologyFileCollection,
-# status_code=status.HTTP_201_CREATED
-# )
-# async def route_create_ontology_file_collection(
-# ontology_file_collection: OntologyFileCollection,
-# user: User = Depends(current_active_user)
-# ):
-# return await create_ontology_file_collection(ontology_file_collection, user=user)
-#
-#
-# @router.patch(
-# "/{id}",
-# description=f"Update {NAME_FOR_ONE}",
-# name=f"{NAME_FOR_MANY}:update_{NAME_FOR_ONE}",
-# response_model=OntologyFileCollection
-# )
-# async def route_update_ontology_file_collection(
-# data: OntologyFileCollection,
-# ontology_file_collection: OntologyFileCollection = Depends(get_ontology_file_collection),
-# user: User = Depends(current_active_user)
-# ):
-# return await update_ontology_file_collection(ontology_file_collection, data, user=user)
-#
-#
-# @router.get(
-# "/{id}",
-# description=f"Get {NAME_FOR_ONE}",
-# name=f"{NAME_FOR_MANY}:get_{NAME_FOR_ONE}",
-# response_model=OntologyFileCollection
-# )
-# async def route_get_ontology_file_collection(
-# ontology_file_collection: OntologyFileCollection = Depends(get_ontology_file_collection)):
-# return ontology_file_collection
-#
-#
-# @router.delete(
-# "/{id}",
-# description=f"Delete {NAME_FOR_ONE}",
-# name=f"{NAME_FOR_MANY}:delete_{NAME_FOR_ONE}",
-# response_model=APIEmptyContentWithIdResponse
-# )
-# async def route_delete_ontology_file_collection(
-# ontology_file_collection: OntologyFileCollection = Depends(get_ontology_file_collection)):
-# await delete_ontology_file_collection(ontology_file_collection)
-# return APIEmptyContentWithIdResponse(id=ontology_file_collection.id)
-#
-#
-# @router.get(
-# "/{id}/file_resources",
-# description=f"List {FILE_RESOURCE_NAME_FOR_MANY}",
-# name=f"{FILE_RESOURCE_NAME_FOR_MANY}:list_{FILE_RESOURCE_NAME_FOR_MANY}",
-# response_model=APIListOntologyFileResourcesPaginatedResponse
-# )
-# async def route_list_ontology_file_collection_file_resources(
-# ontology_file_collection: OntologyFileCollection = Depends(get_ontology_file_collection),
-# project: PydanticObjectId = None,
-# page: int = None,
-# limit: int = None,
-# q: str = None
-# ):
-# filters: dict = {}
-# if project:
-# filters['project'] = Project.link_from_id(project)
-# if q is not None:
-# filters['q'] = q
-#
-# items, total_count = \
-# await list_ontology_file_collection_file_resources(ontology_file_collection, filters, page, limit)
-# return APIListOntologyFileResourcesPaginatedResponse(
-# items=items,
-# count=total_count
-# )
-#
-#
-# @router.post(
-# "/{id}/file_resources",
-# description=f"Create {FILE_RESOURCE_NAME_FOR_ONE}",
-# name=f"{FILE_RESOURCE_NAME_FOR_MANY}:create_{FILE_RESOURCE_NAME_FOR_ONE}",
-# response_model=OntologyFileResource,
-# status_code=status.HTTP_201_CREATED
-# )
-# async def route_create_ontology_file_collection_file_resources(
-# req: Request,
-# ontology_file_collection: OntologyFileCollection = Depends(get_ontology_file_collection),
-# user: User = Depends(current_active_user)
-# ):
-# data = OntologyFileResourceCreateIn(**(await file_resource_data_from_form_request(req)))
-# return await create_ontology_file_collection_file_resource(
-# ontology_file_collection=ontology_file_collection,
-# data=data,
-# user=user
-# )
-#
-#
-# @router.patch(
-# "/file_resources/{id}",
-# description=f"Update {FILE_RESOURCE_NAME_FOR_ONE}",
-# name=f"{FILE_RESOURCE_NAME_FOR_MANY}:update_{FILE_RESOURCE_NAME_FOR_ONE}",
-# response_model=OntologyFileResource
-# )
-# async def route_update_ontology_file_resource(
-# req: Request,
-# ontology_file_resource: OntologyFileResource = Depends(get_ontology_file_resource),
-# user: User = Depends(current_active_user)
-# ):
-# data = OntologyFileResourceUpdateIn(**(await file_resource_data_from_form_request(req)))
-# return await update_ontology_file_resource(ontology_file_resource, data, user=user)
-#
-#
-# @router.get(
-# "/file_resources/{id}",
-# description=f"Get {FILE_RESOURCE_NAME_FOR_ONE}",
-# name=f"{FILE_RESOURCE_NAME_FOR_MANY}:get_{FILE_RESOURCE_NAME_FOR_ONE}",
-# response_model=OntologyFileResource
-# )
-# async def route_get_ontology_file_resource(
-# ontology_file_resource: OntologyFileResource = Depends(get_ontology_file_resource)
-# ):
-# return ontology_file_resource
-#
-#
-# @router.delete(
-# "/file_resources/{id}",
-# description=f"Delete {FILE_RESOURCE_NAME_FOR_ONE}",
-# name=f"{FILE_RESOURCE_NAME_FOR_MANY}:delete_{FILE_RESOURCE_NAME_FOR_ONE}",
-# response_model=APIEmptyContentWithIdResponse
-# )
-# async def route_delete_ontology_file_resource(
-# ontology_file_resource: OntologyFileResource = Depends(get_ontology_file_resource)):
-# await delete_ontology_file_resource(ontology_file_resource)
-# return APIEmptyContentWithIdResponse(id=ontology_file_resource.id)
diff --git a/mapping_workbench/backend/ontology_file_collection/models/__init__.py b/mapping_workbench/backend/ontology_file_collection/models/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/mapping_workbench/backend/ontology_file_collection/models/entity.py b/mapping_workbench/backend/ontology_file_collection/models/entity.py
deleted file mode 100644
index e2b079a91..000000000
--- a/mapping_workbench/backend/ontology_file_collection/models/entity.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# from enum import Enum
-# from typing import Optional, List
-#
-# import pymongo
-# from beanie import Link
-# from pymongo import IndexModel
-#
-# from mapping_workbench.backend.core.models.base_project_resource_entity import BaseProjectResourceEntity
-# from mapping_workbench.backend.file_resource.models.file_resource import FileResource, FileResourceCollection, \
-# FileResourceIn
-#
-#
-# class OntologyFileCollection(FileResourceCollection):
-# file_resources: Optional[List[Link["OntologyFileResource"]]] = []
-#
-# class Settings(BaseProjectResourceEntity.Settings):
-# name = "ontology_file_collections"
-#
-# indexes = [
-# IndexModel(
-# [
-# ("title", pymongo.TEXT),
-# ("description", pymongo.TEXT),
-# ("path", pymongo.TEXT)
-# ],
-# name="search_text_idx"
-# )
-# ]
-#
-#
-# class OntologyFileResourceFormat(Enum):
-# OWL = "OWL"
-# RDF = "RDF"
-#
-#
-# class OntologyFileResourceIn(FileResourceIn):
-# format: Optional[OntologyFileResourceFormat] = None
-#
-#
-# class OntologyFileResourceCreateIn(OntologyFileResourceIn):
-# ontology_file_collection: Optional[Link[OntologyFileCollection]] = None
-#
-#
-# class OntologyFileResourceUpdateIn(OntologyFileResourceIn):
-# pass
-#
-#
-# class OntologyFileResource(FileResource):
-# format: Optional[OntologyFileResourceFormat] = None
-# ontology_file_collection: Optional[Link[OntologyFileCollection]] = None
-#
-# class Settings(FileResource.Settings):
-# name = "ontology_file_resources"
-#
-# indexes = [
-# IndexModel(
-# [
-# ("title", pymongo.TEXT),
-# ("description", pymongo.TEXT),
-# ("filename", pymongo.TEXT),
-# ("path", pymongo.TEXT),
-# ("format", pymongo.TEXT),
-# ("content", pymongo.TEXT)
-# ],
-# name="search_text_idx"
-# )
-# ]
diff --git a/mapping_workbench/backend/ontology_file_collection/models/entity_api_response.py b/mapping_workbench/backend/ontology_file_collection/models/entity_api_response.py
deleted file mode 100644
index 57af2cc65..000000000
--- a/mapping_workbench/backend/ontology_file_collection/models/entity_api_response.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# from typing import List
-#
-# from mapping_workbench.backend.core.models.api_response import APIListPaginatedResponse
-# from mapping_workbench.backend.ontology_file_collection.models.entity import OntologyFileResource, \
-# OntologyFileCollection
-#
-#
-# class APIListOntologyFileCollectionsPaginatedResponse(APIListPaginatedResponse):
-# items: List[OntologyFileCollection]
-#
-#
-# class APIListOntologyFileResourcesPaginatedResponse(APIListPaginatedResponse):
-# items: List[OntologyFileResource]
diff --git a/mapping_workbench/backend/ontology_file_collection/services/__init__.py b/mapping_workbench/backend/ontology_file_collection/services/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/mapping_workbench/backend/ontology_file_collection/services/api.py b/mapping_workbench/backend/ontology_file_collection/services/api.py
deleted file mode 100644
index 8921e7b18..000000000
--- a/mapping_workbench/backend/ontology_file_collection/services/api.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# from typing import List
-#
-# from beanie import PydanticObjectId
-#
-# from mapping_workbench.backend.core.models.base_entity import BaseEntityFiltersSchema
-# from mapping_workbench.backend.core.services.exceptions import ResourceNotFoundException
-# from mapping_workbench.backend.core.services.request import request_update_data, api_entity_is_found, \
-# request_create_data, prepare_search_param, pagination_params
-# from mapping_workbench.backend.ontology_file_collection.models.entity import OntologyFileCollection, \
-# OntologyFileResource, OntologyFileResourceCreateIn, OntologyFileResourceUpdateIn
-# from mapping_workbench.backend.user.models.user import User
-#
-#
-# async def list_ontology_file_collections(filters: dict = None, page: int = None, limit: int = None) -> \
-# (List[OntologyFileCollection], int):
-# query_filters: dict = dict(filters or {}) | dict(BaseEntityFiltersSchema())
-#
-# prepare_search_param(query_filters)
-# skip, limit = pagination_params(page, limit)
-#
-# items: List[OntologyFileCollection] = await OntologyFileCollection.find(
-# query_filters,
-# projection_model=OntologyFileCollection,
-# fetch_links=False,
-# skip=skip,
-# limit=limit
-# ).to_list()
-# total_count: int = await OntologyFileCollection.find(query_filters).count()
-# return items, total_count
-#
-#
-# async def create_ontology_file_collection(
-# ontology_file_collection: OntologyFileCollection,
-# user: User
-# ) -> OntologyFileCollection:
-# ontology_file_collection.on_create(user=user)
-# return await ontology_file_collection.create()
-#
-#
-# async def update_ontology_file_collection(
-# ontology_file_collection: OntologyFileCollection,
-# data: OntologyFileCollection,
-# user: User
-# ):
-# return await ontology_file_collection.set(
-# request_update_data(data, user=user)
-# )
-#
-#
-# async def get_ontology_file_collection(id: PydanticObjectId) -> OntologyFileCollection:
-# ontology_file_collection: OntologyFileCollection = await OntologyFileCollection.get(id)
-# if not api_entity_is_found(ontology_file_collection):
-# raise ResourceNotFoundException()
-# return ontology_file_collection
-#
-#
-# async def delete_ontology_file_collection(ontology_file_collection: OntologyFileCollection):
-# return await ontology_file_collection.delete()
-#
-#
-# async def list_ontology_file_collection_file_resources(
-# ontology_file_collection: OntologyFileCollection,
-# filters=None, page: int = None, limit: int = None
-# ) -> (List[OntologyFileResource], int):
-# query_filters: dict = dict(filters or {}) | dict(BaseEntityFiltersSchema())
-# query_filters['ontology_file_collection'] = OntologyFileCollection.link_from_id(ontology_file_collection.id)
-#
-# prepare_search_param(query_filters)
-# skip, limit = pagination_params(page, limit)
-#
-# items: List[OntologyFileResource] = await OntologyFileResource.find(
-# query_filters,
-# fetch_links=False,
-# skip=skip,
-# limit=limit
-# ).to_list()
-# total_count: int = await OntologyFileResource.find(query_filters).count()
-# return items, total_count
-#
-#
-# async def create_ontology_file_collection_file_resource(
-# ontology_file_collection: OntologyFileCollection,
-# data: OntologyFileResourceCreateIn,
-# user: User
-# ) -> OntologyFileResource:
-# data.ontology_file_collection = ontology_file_collection
-# ontology_file_resource = \
-# OntologyFileResource(
-# **request_create_data(data, user=user)
-# )
-# return await ontology_file_resource.create()
-#
-#
-# async def update_ontology_file_resource(
-# ontology_file_resource: OntologyFileResource,
-# data: OntologyFileResourceUpdateIn,
-# user: User) -> OntologyFileResource:
-# return await ontology_file_resource.set(
-# request_update_data(data, user=user)
-# )
-#
-#
-# async def get_ontology_file_resource(id: PydanticObjectId) -> OntologyFileResource:
-# ontology_file_resource = await OntologyFileResource.get(id)
-# if not api_entity_is_found(ontology_file_resource):
-# raise ResourceNotFoundException()
-# return ontology_file_resource
-#
-#
-# async def delete_ontology_file_resource(ontology_file_resource: OntologyFileResource):
-# return await ontology_file_resource.delete()
diff --git a/mapping_workbench/backend/package_importer/adapters/importer_abc.py b/mapping_workbench/backend/package_importer/adapters/importer_abc.py
index 43cae816a..6fc56b323 100644
--- a/mapping_workbench/backend/package_importer/adapters/importer_abc.py
+++ b/mapping_workbench/backend/package_importer/adapters/importer_abc.py
@@ -2,6 +2,8 @@
from pathlib import Path
from typing import Dict, Tuple, List
+from beanie.odm.operators.find.comparison import Eq
+
from mapping_workbench.backend.conceptual_mapping_rule.models.entity import ConceptualMappingRule
from mapping_workbench.backend.mapping_package.models.entity import MappingPackage
from mapping_workbench.backend.mapping_rule_registry.models.entity import MappingGroup
@@ -18,7 +20,7 @@
from mapping_workbench.backend.sparql_test_suite.services.data import SPARQL_CM_ASSERTIONS_SUITE_TITLE, \
SPARQL_INTEGRATION_TESTS_SUITE_TITLE
from mapping_workbench.backend.task_manager.adapters.task_progress import TaskProgress
-from mapping_workbench.backend.tasks.models.task_response import TaskResponse, TaskProgressAction, TaskProgressStatus
+from mapping_workbench.backend.tasks.models.task_response import TaskResponse
from mapping_workbench.backend.test_data_suite.models.entity import TestDataSuite, TestDataFileResource, \
TestDataFileResourceFormat
from mapping_workbench.backend.triple_map_fragment.models.entity import TripleMapFragmentFormat, \
@@ -56,19 +58,20 @@ async def add_test_data_from_mono(self, mono_package: ImportedMappingSuite):
for mono_resource_collection in mono_package.test_data_resources:
test_data_suite: TestDataSuite = await TestDataSuite.find_one(
TestDataSuite.project == self.project_link,
- TestDataSuite.title == mono_resource_collection.name,
- TestDataSuite.mapping_package_id == self.package.id
+ TestDataSuite.title == mono_resource_collection.name
)
if not test_data_suite:
test_data_suite = TestDataSuite(
project=self.project,
- mapping_package_id=self.package.id,
title=mono_resource_collection.name
)
-
await test_data_suite.on_create(self.user).save()
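+        # Suites are referenced from MappingPackage.test_data_suites, so register the new suite on the package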
+ test_data_suite_link = TestDataSuite.link_from_id(test_data_suite.id)
+ if test_data_suite_link not in self.package.test_data_suites:
+ self.package.test_data_suites.append(test_data_suite_link)
+
for mono_file_resource in mono_resource_collection.file_resources:
resource_path = [mono_resource_collection.name]
resource_name = mono_file_resource.name
@@ -118,23 +121,24 @@ async def add_transformation_mappings_from_mono(self, mono_package: ImportedMapp
continue
resource_content = mono_file_resource.content
- triple_map_fragment = await SpecificTripleMapFragment.find_one(
- SpecificTripleMapFragment.project == self.project_link,
- SpecificTripleMapFragment.mapping_package_id == self.package.id,
- SpecificTripleMapFragment.triple_map_uri == resource_name
+ triple_map_fragment = await GenericTripleMapFragment.find_one(
+ GenericTripleMapFragment.project == self.project_link,
+ GenericTripleMapFragment.triple_map_uri == resource_name
)
if not triple_map_fragment:
- triple_map_fragment = SpecificTripleMapFragment(
+ triple_map_fragment = GenericTripleMapFragment(
triple_map_uri=resource_name,
triple_map_content=resource_content,
format=resource_format,
project=self.project,
- mapping_package_id=self.package.id
+ refers_to_mapping_package_ids=[self.package.id]
)
await triple_map_fragment.on_create(self.user).save()
else:
triple_map_fragment.triple_map_content = resource_content
+            # Older fragments may not have a package id list yet
+            if not triple_map_fragment.refers_to_mapping_package_ids:
+                triple_map_fragment.refers_to_mapping_package_ids = []
+            if self.package.id not in triple_map_fragment.refers_to_mapping_package_ids:
+ triple_map_fragment.refers_to_mapping_package_ids.append(self.package.id)
await triple_map_fragment.on_update(self.user).save()
self.task_progress.finish_current_action_step()
@@ -347,6 +351,7 @@ async def add_mapping_package_from_mono(self, mono_package: ImportedMappingSuite
package = MappingPackage(**metadata)
package.project = self.project
+ package.test_data_suites = []
package.shacl_test_suites = []
package.sparql_test_suites = []
package.resource_collections = []
diff --git a/mapping_workbench/backend/package_processor/entrypoints/api/routes.py b/mapping_workbench/backend/package_processor/entrypoints/api/routes.py
index eeb137a31..ab1d2ba75 100644
--- a/mapping_workbench/backend/package_processor/entrypoints/api/routes.py
+++ b/mapping_workbench/backend/package_processor/entrypoints/api/routes.py
@@ -1,11 +1,10 @@
+from typing import Optional
+
from beanie import PydanticObjectId
from fastapi import APIRouter, status, Depends, Form
-from mapping_workbench.backend.mapping_package.models.entity import MappingPackage, MappingPackageOut
-from mapping_workbench.backend.package_processor.services import tasks
from mapping_workbench.backend.package_processor.services.tasks import add_task_process_mapping_package
from mapping_workbench.backend.security.services.user_manager import current_active_user
-from mapping_workbench.backend.task_manager.services.task_wrapper import add_task
from mapping_workbench.backend.user.models.user import User
ROUTE_PREFIX = "/package_processor"
@@ -28,13 +27,13 @@
)
async def route_task_process_package(
package_id: PydanticObjectId = Form(...),
- use_latest_package_state: bool = Form(...),
- tasks_to_run: str = Form(...),
+ use_only_package_state: bool = Form(...),
+ tasks_to_run: Optional[str] = Form(default=None),
user: User = Depends(current_active_user)
):
return (await add_task_process_mapping_package(
package_id=package_id,
user=user,
- use_latest_package_state=use_latest_package_state,
- tasks_to_run=tasks_to_run.split(',')
+ use_only_package_state=use_only_package_state,
+ tasks_to_run=tasks_to_run.split(',') if tasks_to_run else None
)).task_metadata
diff --git a/mapping_workbench/backend/package_processor/services/mapping_package_processor.py b/mapping_workbench/backend/package_processor/services/mapping_package_processor.py
index ba2c0525e..90a30637f 100644
--- a/mapping_workbench/backend/package_processor/services/mapping_package_processor.py
+++ b/mapping_workbench/backend/package_processor/services/mapping_package_processor.py
@@ -37,7 +37,7 @@ async def create_mapping_package_state(mapping_package: MappingPackage):
async def process_mapping_package(
package_id: PydanticObjectId,
- use_latest_package_state: bool = False,
+ use_only_package_state: bool = False,
tasks_to_run: List[str] = None,
user: User = None,
task_response: TaskResponse = None
@@ -45,7 +45,7 @@ async def process_mapping_package(
"""
:param task_response:
- :param use_latest_package_state:
+ :param use_only_package_state:
:param tasks_to_run:
:param package_id:
:param user:
@@ -59,10 +59,11 @@ async def process_mapping_package(
task_progress.start_progress(actions_count=1)
steps_count = 2
- if tasks_to_run is None:
- steps_count += len(TaskToRun) - COMPOUND_TASKS_COUNT
- else:
- steps_count += len(tasks_to_run) - len(set([x.value for x in COMPOUND_TASKS]) & set(tasks_to_run))
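+    # When only the saved package state is used, the transform/generate/validate steps below are skipped and not counted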
+ if not use_only_package_state:
+ if tasks_to_run is None:
+ steps_count += len(TaskToRun) - COMPOUND_TASKS_COUNT
+ else:
+ steps_count += len(tasks_to_run) - len(set([x.value for x in COMPOUND_TASKS]) & set(tasks_to_run))
task_progress.start_action(
name="Process Package",
@@ -79,24 +80,25 @@ async def process_mapping_package(
task_progress.finish_current_action_step()
mwb_logger.log_all_info("Initializing Package State ... DONE")
- if tasks_to_run is None or TaskToRun.TRANSFORM_TEST_DATA.value in tasks_to_run:
- mwb_logger.log_all_info(f"Transforming '{mapping_package.identifier}' Test Data ...")
- task_progress.start_action_step(name=TaskToRun.TRANSFORM_TEST_DATA.value)
- await transform_mapping_package_state(mapping_package_state=mapping_package_state)
- task_progress.finish_current_action_step()
- mwb_logger.log_all_info(f"Transforming '{mapping_package.identifier}' Test Data ... DONE")
-
- if tasks_to_run is None or TaskToRun.GENERATE_CM_ASSERTIONS.value in tasks_to_run:
- mwb_logger.log_all_info("Generating CM Assertions Queries ...")
- task_progress.start_action_step(name=TaskToRun.GENERATE_CM_ASSERTIONS.value)
- await generate_cm_assertions_queries_for_package_state(mapping_package_state=mapping_package_state)
- task_progress.finish_current_action_step()
- mwb_logger.log_all_info("Generating CM Assertions Queries ... DONE")
-
- if tasks_to_run is None or TaskToRun.VALIDATE_PACKAGE.value in tasks_to_run:
- mwb_logger.log_all_info("Validating Package State ...")
- await validate_mapping_package(mapping_package_state, tasks_to_run, task_progress=task_progress)
- mwb_logger.log_all_info("Validating Package State ... DONE")
+ if not use_only_package_state:
+ if tasks_to_run is None or TaskToRun.TRANSFORM_TEST_DATA.value in tasks_to_run:
+ mwb_logger.log_all_info(f"Transforming '{mapping_package.identifier}' Test Data ...")
+ task_progress.start_action_step(name=TaskToRun.TRANSFORM_TEST_DATA.value)
+ await transform_mapping_package_state(mapping_package_state=mapping_package_state)
+ task_progress.finish_current_action_step()
+ mwb_logger.log_all_info(f"Transforming '{mapping_package.identifier}' Test Data ... DONE")
+
+ if tasks_to_run is None or TaskToRun.GENERATE_CM_ASSERTIONS.value in tasks_to_run:
+ mwb_logger.log_all_info("Generating CM Assertions Queries ...")
+ task_progress.start_action_step(name=TaskToRun.GENERATE_CM_ASSERTIONS.value)
+ await generate_cm_assertions_queries_for_package_state(mapping_package_state=mapping_package_state)
+ task_progress.finish_current_action_step()
+ mwb_logger.log_all_info("Generating CM Assertions Queries ... DONE")
+
+ if tasks_to_run is None or TaskToRun.VALIDATE_PACKAGE.value in tasks_to_run:
+ mwb_logger.log_all_info("Validating Package State ...")
+ await validate_mapping_package(mapping_package_state, tasks_to_run, task_progress=task_progress)
+ mwb_logger.log_all_info("Validating Package State ... DONE")
mwb_logger.log_all_info("Saving Package State ...")
task_progress.start_action_step(name="save_package_state")
diff --git a/mapping_workbench/backend/package_processor/services/tasks.py b/mapping_workbench/backend/package_processor/services/tasks.py
index c3d55aa8b..284695dc1 100644
--- a/mapping_workbench/backend/package_processor/services/tasks.py
+++ b/mapping_workbench/backend/package_processor/services/tasks.py
@@ -12,21 +12,21 @@
def task_process_mapping_package(
package_id: PydanticObjectId,
- use_latest_package_state: bool = False,
+ use_only_package_state: bool = False,
tasks_to_run: List[str] = None,
user: User = None,
task_response: TaskResponse = None
):
run_task(
process_mapping_package,
- package_id, use_latest_package_state, tasks_to_run, user, task_response
+ package_id, use_only_package_state, tasks_to_run, user, task_response
)
async def add_task_process_mapping_package(
package_id: PydanticObjectId,
user: User = None,
- use_latest_package_state: bool = False,
+ use_only_package_state: bool = False,
tasks_to_run: List[str] = None
) -> Task:
task_timeout = 4 * 60 * 60 # 4 hours
@@ -39,5 +39,5 @@ async def add_task_process_mapping_package(
task_timeout,
user.email,
True,
- package_id, use_latest_package_state, tasks_to_run, user
+ package_id, use_only_package_state, tasks_to_run, user
)
diff --git a/mapping_workbench/backend/project/services/data.py b/mapping_workbench/backend/project/services/data.py
index 137b56fee..9754fb211 100644
--- a/mapping_workbench/backend/project/services/data.py
+++ b/mapping_workbench/backend/project/services/data.py
@@ -11,7 +11,8 @@
from mapping_workbench.backend.shacl_test_suite.models.entity import SHACLTestSuite, SHACLTestFileResource
from mapping_workbench.backend.sparql_test_suite.models.entity import SPARQLTestSuite, SPARQLTestFileResource
from mapping_workbench.backend.test_data_suite.models.entity import TestDataSuite, TestDataFileResource
-from mapping_workbench.backend.triple_map_fragment.models.entity import SpecificTripleMapFragment
+from mapping_workbench.backend.triple_map_fragment.models.entity import SpecificTripleMapFragment, \
+ GenericTripleMapFragment
async def clear_project_suite_resources(
@@ -48,36 +49,22 @@ async def clear_project_shared_resources(
)
-async def clear_project_package_resources(
- project_link: Link,
- suite_model,
- resource_model,
- resource_suite_ref_name
-):
- for suite in (await TestDataSuite.find(
- Eq(suite_model.mapping_package_id, None),
- Eq(suite_model.project, project_link)
- ).to_list()):
- await clear_project_suite_resources(
- project_link=project_link,
- suite=suite,
- suite_model=suite_model,
- resource_model=resource_model,
- resource_suite_ref_name=resource_suite_ref_name
- )
-
-
async def remove_project_orphan_shareable_resources(project_id: PydanticObjectId):
project_link = Project.link_from_id(project_id)
mapping_packages: List[MappingPackage] = await MappingPackage.find(
Eq(MappingPackage.project, project_link)
).to_list()
+ shared_test_data_suites_ids: List[PydanticObjectId] = []
shared_shacl_test_suites_ids: List[PydanticObjectId] = []
shared_sparql_test_suites_ids: List[PydanticObjectId] = []
shared_resource_collections_ids: List[PydanticObjectId] = []
for mapping_package in mapping_packages:
+ if mapping_package.test_data_suites:
+ shared_test_data_suites_ids += [
+ test_data_suite_ref.to_ref().id for test_data_suite_ref in mapping_package.test_data_suites
+ ]
if mapping_package.shacl_test_suites:
shared_shacl_test_suites_ids += [
shacl_test_suite_ref.to_ref().id for shacl_test_suite_ref in mapping_package.shacl_test_suites
@@ -91,6 +78,7 @@ async def remove_project_orphan_shareable_resources(project_id: PydanticObjectId
resource_collection_ref.to_ref().id for resource_collection_ref in mapping_package.resource_collections
]
+ shared_test_data_suites_ids = list(set(shared_test_data_suites_ids))
shared_shacl_test_suites_ids = list(set(shared_shacl_test_suites_ids))
shared_sparql_test_suites_ids = list(set(shared_sparql_test_suites_ids))
shared_resource_collections_ids = list(set(shared_resource_collections_ids))
@@ -119,9 +107,10 @@ async def remove_project_orphan_shareable_resources(project_id: PydanticObjectId
resource_suite_ref_name=ResourceFile.resource_collection
)
- await clear_project_package_resources(
+ await clear_project_shared_resources(
project_link=project_link,
suite_model=TestDataSuite,
+ shared_suite_ids=shared_test_data_suites_ids,
resource_model=TestDataFileResource,
resource_suite_ref_name=TestDataFileResource.test_data_suite
)
@@ -131,6 +120,16 @@ async def remove_project_orphan_shareable_resources(project_id: PydanticObjectId
Eq(SpecificTripleMapFragment.project, project_link)
).delete()
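+    # Generic triple map fragments left with no package references are orphans and are removed as well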
+ await GenericTripleMapFragment.find(
+ And(
+ Or(
+ Eq(GenericTripleMapFragment.refers_to_mapping_package_ids, None),
+ Eq(GenericTripleMapFragment.refers_to_mapping_package_ids, [])
+ ),
+ Eq(GenericTripleMapFragment.project, project_link)
+ )
+ ).delete()
+
await ConceptualMappingRule.find(
And(
Or(
diff --git a/mapping_workbench/backend/resource_collection/services/api.py b/mapping_workbench/backend/resource_collection/services/api.py
index 70f3e0096..6da57b733 100644
--- a/mapping_workbench/backend/resource_collection/services/api.py
+++ b/mapping_workbench/backend/resource_collection/services/api.py
@@ -7,6 +7,8 @@
from mapping_workbench.backend.core.services.request import request_update_data, api_entity_is_found, \
request_create_data, prepare_search_param, pagination_params
from mapping_workbench.backend.mapping_package.models.entity import MappingPackage
+from mapping_workbench.backend.mapping_package.services.link import unassign_resources_from_mapping_packages, \
+ ResourceField
from mapping_workbench.backend.resource_collection.models.entity import ResourceCollection, ResourceFile, \
ResourceFileUpdateIn, ResourceFileCreateIn
from mapping_workbench.backend.user.models.user import User
@@ -66,6 +68,11 @@ async def get_resource_collection(id: PydanticObjectId) -> ResourceCollection:
async def delete_resource_collection(resource_collection: ResourceCollection):
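+    # Drop the collection's link from any mapping packages before deleting it, to avoid dangling references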
+ await unassign_resources_from_mapping_packages(
+ project_id=resource_collection.project.to_ref().id,
+ resources_ids=[resource_collection.id],
+ resources_field=ResourceField.RESOURCE_COLLECTIONS
+ )
return await resource_collection.delete()
diff --git a/mapping_workbench/backend/shacl_test_suite/services/api.py b/mapping_workbench/backend/shacl_test_suite/services/api.py
index 74f339de8..c31930272 100644
--- a/mapping_workbench/backend/shacl_test_suite/services/api.py
+++ b/mapping_workbench/backend/shacl_test_suite/services/api.py
@@ -7,6 +7,8 @@
from mapping_workbench.backend.core.services.request import request_update_data, api_entity_is_found, \
request_create_data, prepare_search_param, pagination_params
from mapping_workbench.backend.mapping_package.models.entity import MappingPackage
+from mapping_workbench.backend.mapping_package.services.link import ResourceField, \
+ unassign_resources_from_mapping_packages
from mapping_workbench.backend.project.models.entity import Project
from mapping_workbench.backend.shacl_test_suite.models.entity import SHACLTestSuite, SHACLTestFileResource
from mapping_workbench.backend.shacl_test_suite.models.entity_api_response import SHACLTestFileResourceCreateIn, \
@@ -67,6 +69,11 @@ async def get_shacl_test_suite(id: PydanticObjectId) -> SHACLTestSuite:
async def delete_shacl_test_suite(shacl_test_suite: SHACLTestSuite):
+ await unassign_resources_from_mapping_packages(
+ project_id=shacl_test_suite.project.to_ref().id,
+ resources_ids=[shacl_test_suite.id],
+ resources_field=ResourceField.SHACL_TEST_SUITES
+ )
return await shacl_test_suite.delete()
diff --git a/mapping_workbench/backend/sparql_test_suite/services/api.py b/mapping_workbench/backend/sparql_test_suite/services/api.py
index f177f7160..d7a448c78 100644
--- a/mapping_workbench/backend/sparql_test_suite/services/api.py
+++ b/mapping_workbench/backend/sparql_test_suite/services/api.py
@@ -7,6 +7,8 @@
from mapping_workbench.backend.core.services.request import request_update_data, api_entity_is_found, \
request_create_data, prepare_search_param, pagination_params
from mapping_workbench.backend.mapping_package.models.entity import MappingPackage
+from mapping_workbench.backend.mapping_package.services.link import ResourceField, \
+ unassign_resources_from_mapping_packages
from mapping_workbench.backend.project.models.entity import Project
from mapping_workbench.backend.sparql_test_suite.models.entity import SPARQLTestSuite, SPARQLTestFileResource, \
SPARQLTestFileResourceUpdateIn, SPARQLTestFileResourceCreateIn
@@ -66,6 +68,11 @@ async def get_sparql_test_suite(id: PydanticObjectId) -> SPARQLTestSuite:
async def delete_sparql_test_suite(sparql_test_suite: SPARQLTestSuite):
+ await unassign_resources_from_mapping_packages(
+ project_id=sparql_test_suite.project.to_ref().id,
+ resources_ids=[sparql_test_suite.id],
+ resources_field=ResourceField.SPARQL_TEST_SUITES
+ )
return await sparql_test_suite.delete()
diff --git a/mapping_workbench/backend/test_data_suite/entrypoints/api/routes.py b/mapping_workbench/backend/test_data_suite/entrypoints/api/routes.py
index 6f74b0024..6d03c73e1 100644
--- a/mapping_workbench/backend/test_data_suite/entrypoints/api/routes.py
+++ b/mapping_workbench/backend/test_data_suite/entrypoints/api/routes.py
@@ -384,10 +384,7 @@ async def route_transform_test_data_file_resource_with_triple_map(
mappings = [await get_specific_triple_map_fragment(specific_triple_map_id)] if use_this_triple_map else None
try:
- package_id = (
- mapping_package_id
- # or await get_mapping_package_id_for_test_data_file_resource(test_data_file_resource)
- )
+ package_id = mapping_package_id
test_data_file_resource = await transform_test_data_file_resource(
test_data_file_resource=test_data_file_resource,
package_id=package_id,
diff --git a/mapping_workbench/backend/test_data_suite/models/entity.py b/mapping_workbench/backend/test_data_suite/models/entity.py
index 6fdad2bd7..8c322c04e 100644
--- a/mapping_workbench/backend/test_data_suite/models/entity.py
+++ b/mapping_workbench/backend/test_data_suite/models/entity.py
@@ -6,6 +6,8 @@
from pydantic import BaseModel
from pymongo import IndexModel
+from mapping_workbench.backend.core.models.base_mapping_package_resource_entity import \
+ BaseMappingPackagesResourceSchemaTrait
from mapping_workbench.backend.core.models.base_project_resource_entity import BaseProjectResourceEntity
from mapping_workbench.backend.file_resource.models.file_resource import FileResource, FileResourceCollection, \
FileResourceIn, FileResourceFormat, FileResourceState
@@ -123,6 +125,7 @@ class TestDataSuiteState(TestDataValidation, ObjectState):
class TestDataSuite(
FileResourceCollection,
+ BaseMappingPackagesResourceSchemaTrait,
StatefulObjectABC
):
file_resources: Optional[List[Link[TestDataFileResource]]] = []
diff --git a/mapping_workbench/backend/test_data_suite/services/api.py b/mapping_workbench/backend/test_data_suite/services/api.py
index da0fa1afa..5263fcbaa 100644
--- a/mapping_workbench/backend/test_data_suite/services/api.py
+++ b/mapping_workbench/backend/test_data_suite/services/api.py
@@ -6,6 +6,9 @@
from mapping_workbench.backend.core.services.exceptions import ResourceNotFoundException
from mapping_workbench.backend.core.services.request import request_update_data, api_entity_is_found, \
request_create_data, prepare_search_param, pagination_params
+from mapping_workbench.backend.mapping_package.models.entity import MappingPackage
+from mapping_workbench.backend.mapping_package.services.link import unassign_resources_from_mapping_packages, \
+ ResourceField
from mapping_workbench.backend.test_data_suite.models.entity import TestDataSuite, TestDataFileResource, \
TestDataFileResourceUpdateIn, TestDataFileResourceCreateIn
from mapping_workbench.backend.test_data_suite.services.transform_test_data import transform_test_data_file_resource
@@ -32,7 +35,19 @@ async def list_test_data_suites(filters: dict = None, page: int = None, limit: i
async def create_test_data_suite(test_data_suite: TestDataSuite, user: User) -> TestDataSuite:
test_data_suite.on_create(user=user)
- return await test_data_suite.create()
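+    # Package membership is stored on MappingPackage.test_data_suites: lift the ids off the suite,
+    # create it, then back-fill a link onto each referenced package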
+ pkg_ids = test_data_suite.refers_to_mapping_package_ids or []
+ test_data_suite.refers_to_mapping_package_ids = None
+ suite = await test_data_suite.create()
+
+ for pkg_id in pkg_ids:
+ mapping_package = await MappingPackage.get(pkg_id)
+ if mapping_package:
+ if not mapping_package.test_data_suites:
+ mapping_package.test_data_suites = []
+ mapping_package.test_data_suites.append(TestDataSuite.link_from_id(suite.id))
+ await mapping_package.save()
+
+ return suite
async def update_test_data_suite(
@@ -53,6 +68,11 @@ async def get_test_data_suite(id: PydanticObjectId) -> TestDataSuite:
async def delete_test_data_suite(test_data_suite: TestDataSuite):
+ await unassign_resources_from_mapping_packages(
+ project_id=test_data_suite.project.to_ref().id,
+ resources_ids=[test_data_suite.id],
+ resources_field=ResourceField.TEST_DATA_SUITES
+ )
return await test_data_suite.delete()
@@ -96,10 +116,7 @@ async def update_test_data_file_resource(
update_data = request_update_data(data, user=user)
test_data_file_resource = await test_data_file_resource.set(update_data)
if transform_test_data:
- package_id = (
- transform_mapping_package_id
- # or await get_mapping_package_id_for_test_data_file_resource(test_data_file_resource)
- )
+ package_id = transform_mapping_package_id
test_data_file_resource = await transform_test_data_file_resource(
test_data_file_resource=test_data_file_resource,
package_id=package_id,
diff --git a/mapping_workbench/backend/test_data_suite/services/data.py b/mapping_workbench/backend/test_data_suite/services/data.py
index a788948f9..7c6d928bc 100644
--- a/mapping_workbench/backend/test_data_suite/services/data.py
+++ b/mapping_workbench/backend/test_data_suite/services/data.py
@@ -1,7 +1,10 @@
from typing import List
from beanie import PydanticObjectId
+from beanie.odm.operators.find.comparison import In, Eq
+from mapping_workbench.backend.mapping_package.models.entity import MappingPackage
+from mapping_workbench.backend.mapping_package.services.api import get_mapping_package
from mapping_workbench.backend.project.models.entity import Project
from mapping_workbench.backend.test_data_suite.models.entity import TestDataFileResource, TestDataSuite, \
TestDataFileResourceFormat
@@ -18,9 +21,15 @@ async def get_test_data_file_resources_for_project(project_id: PydanticObjectId)
async def get_test_data_file_resources_for_package(package_id: PydanticObjectId) -> \
List[TestDataFileResource]:
- test_data_suites: List[TestDataSuite] = await TestDataSuite.find(
- TestDataSuite.mapping_package_id == package_id
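+    # Suites for a package are resolved through the package's test_data_suites links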
+ mapping_package: MappingPackage = await get_mapping_package(package_id)
+ test_data_suites_ids = []
+ if mapping_package.test_data_suites:
+ test_data_suites_ids = [test_data_suite.to_ref().id for test_data_suite in mapping_package.test_data_suites]
+ test_data_suites = await TestDataSuite.find(
+ In(TestDataSuite.id, test_data_suites_ids),
+ Eq(TestDataSuite.project, mapping_package.project)
).to_list()
+
test_data_file_resources: List[TestDataFileResource] = []
for test_data_suite in test_data_suites:
test_data_file_resources += await TestDataFileResource.find(
@@ -32,14 +41,3 @@ async def get_test_data_file_resources_for_package(package_id: PydanticObjectId)
def is_valid_test_data_format(test_format: str):
return test_format in [e.value for e in TestDataFileResourceFormat]
-
-
-async def get_mapping_package_id_for_test_data_file_resource(
- test_data_file_resource: TestDataFileResource
-) -> PydanticObjectId:
- package_id = None
- if test_data_file_resource.test_data_suite:
- test_data_suite: TestDataSuite = await test_data_file_resource.test_data_suite.fetch()
- if isinstance(test_data_suite, TestDataSuite) and test_data_suite.mapping_package_id:
- package_id = test_data_suite.mapping_package_id
- return package_id
diff --git a/mapping_workbench/backend/test_data_suite/services/link.py b/mapping_workbench/backend/test_data_suite/services/link.py
index a19de7c97..d336e306c 100644
--- a/mapping_workbench/backend/test_data_suite/services/link.py
+++ b/mapping_workbench/backend/test_data_suite/services/link.py
@@ -1,12 +1,11 @@
from mapping_workbench.backend.core.models.api_request import AssignMappingPackagesRequest
-from mapping_workbench.backend.mapping_package.services.link import assign_mapping_package_to_resources
-from mapping_workbench.backend.test_data_suite.models.entity import TestDataSuite
+from mapping_workbench.backend.mapping_package.services.link import assign_resources_to_mapping_packages, ResourceField
async def assign_test_data_suites_to_mapping_packages(data: AssignMappingPackagesRequest):
- await assign_mapping_package_to_resources(
+ await assign_resources_to_mapping_packages(
project_id=data.project,
- mapping_package_id=(data.mapping_packages_ids[0] if data.mapping_packages_ids else None),
- resource_model=TestDataSuite,
- resources_ids=data.resources_ids
+ resources_ids=data.resources_ids,
+ resources_field=ResourceField.TEST_DATA_SUITES,
+ mapping_packages_ids=data.mapping_packages_ids
)
diff --git a/mapping_workbench/backend/test_data_suite/services/transform_test_data.py b/mapping_workbench/backend/test_data_suite/services/transform_test_data.py
index 35086beae..9d0d8cef0 100644
--- a/mapping_workbench/backend/test_data_suite/services/transform_test_data.py
+++ b/mapping_workbench/backend/test_data_suite/services/transform_test_data.py
@@ -20,7 +20,9 @@
from mapping_workbench.backend.triple_map_fragment.models.entity import TripleMapFragment, TripleMapFragmentState, \
TripleMapFragmentABC
from mapping_workbench.backend.triple_map_fragment.services.data_for_generic import \
- get_generic_triple_map_fragments_for_project, get_specific_triple_map_fragments_for_package
+ get_generic_triple_map_fragments_for_project_package
+from mapping_workbench.backend.triple_map_fragment.services.data_for_specific import \
+ get_specific_triple_map_fragments_for_package
from mapping_workbench.backend.user.models.user import User
@@ -68,15 +70,19 @@ async def get_mappings_to_transform_test_data(
project_id: PydanticObjectId,
package_id: PydanticObjectId = None
):
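+ # Mappings are resolved per package: without a package_id there is nothing to transform.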
- generic_mappings = await get_generic_triple_map_fragments_for_project(
- project_id=project_id
- )
+ mappings = []
+
specific_mappings = []
if package_id is not None:
+ generic_mappings = await get_generic_triple_map_fragments_for_project_package(
+ project_id=project_id,
+ package_id=package_id
+ )
specific_mappings = await get_specific_triple_map_fragments_for_package(
+ project_id=project_id,
package_id=package_id
)
- return generic_mappings + specific_mappings
+ mappings = generic_mappings + specific_mappings
+ return mappings
async def transform_test_data_file_resource(
diff --git a/mapping_workbench/backend/triple_map_fragment/models/entity.py b/mapping_workbench/backend/triple_map_fragment/models/entity.py
index cf4f2ad06..0ba2cb03e 100644
--- a/mapping_workbench/backend/triple_map_fragment/models/entity.py
+++ b/mapping_workbench/backend/triple_map_fragment/models/entity.py
@@ -7,7 +7,9 @@
from mapping_workbench.backend.core.models.base_mapping_package_resource_entity import \
BaseMappingPackageResourceEntityOutSchema, BaseMappingPackageResourceEntityInSchema, \
- BaseMappingPackageResourceEntityUpdateInSchema, BaseMappingPackageResourceSchemaTrait
+ BaseMappingPackageResourceEntityUpdateInSchema, BaseMappingPackageResourceSchemaTrait, \
+ BaseMappingPackagesResourceEntityOutSchema, BaseMappingPackagesResourceEntityInSchema, \
+ BaseMappingPackagesResourceSchemaTrait
from mapping_workbench.backend.core.models.base_project_resource_entity import BaseProjectResourceEntity, \
BaseProjectResourceEntityInSchema, BaseProjectResourceEntityOutSchema, BaseProjectResourceEntityUpdateInSchema
from mapping_workbench.backend.state_manager.models.state_object import ObjectState, StatefulObjectABC
@@ -56,7 +58,7 @@ class SpecificTripleMapFragmentOut(
format: Optional[TripleMapFragmentFormat] = None
-class GenericTripleMapFragmentIn(BaseProjectResourceEntityInSchema):
+class GenericTripleMapFragmentIn(BaseProjectResourceEntityInSchema, BaseMappingPackagesResourceEntityInSchema):
identifier: Optional[str] = None
triple_map_uri: Optional[str] = None
triple_map_content: Optional[str] = None
@@ -71,7 +73,7 @@ class GenericTripleMapFragmentUpdateIn(GenericTripleMapFragmentIn):
pass
-class GenericTripleMapFragmentOut(BaseProjectResourceEntityOutSchema):
+class GenericTripleMapFragmentOut(BaseProjectResourceEntityOutSchema, BaseMappingPackagesResourceEntityOutSchema):
identifier: Optional[str] = None
triple_map_uri: Optional[str] = None
triple_map_content: Optional[str] = None
@@ -90,7 +92,11 @@ class TripleMapFragmentState(ObjectState, TripleMapFragmentABC):
format: Optional[TripleMapFragmentFormat] = None
-class TripleMapFragment(BaseProjectResourceEntity, StatefulObjectABC, TripleMapFragmentABC):
+class TripleMapFragment(
+ BaseProjectResourceEntity,
+ StatefulObjectABC,
+ TripleMapFragmentABC
+):
identifier: Optional[str] = None
triple_map_uri: Optional[str] = None
triple_map_content: Optional[str] = None
@@ -124,7 +130,10 @@ class Settings(TripleMapFragment.Settings):
]
-class GenericTripleMapFragment(TripleMapFragment):
+class GenericTripleMapFragment(
+ TripleMapFragment,
+ BaseMappingPackagesResourceSchemaTrait
+):
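+ # Generic fragments can now be linked to several mapping packages via refers_to_mapping_package_ids.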
class Settings(TripleMapFragment.Settings):
name = "generic_triple_map_fragments"
diff --git a/mapping_workbench/backend/triple_map_fragment/services/api_for_generic.py b/mapping_workbench/backend/triple_map_fragment/services/api_for_generic.py
index aff508c8c..f6dbf70f9 100644
--- a/mapping_workbench/backend/triple_map_fragment/services/api_for_generic.py
+++ b/mapping_workbench/backend/triple_map_fragment/services/api_for_generic.py
@@ -50,8 +50,15 @@ async def update_generic_triple_map_fragment(
data: GenericTripleMapFragmentUpdateIn,
user: User
) -> GenericTripleMapFragmentOut:
+ update_data = request_update_data(data, user=user)
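+ # Package ids may arrive as plain strings; normalize them to PydanticObjectId before persisting.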
+ if GenericTripleMapFragment.refers_to_mapping_package_ids in update_data:
+ update_data[GenericTripleMapFragment.refers_to_mapping_package_ids] = [
+ PydanticObjectId(package_id) for package_id in
+ update_data[GenericTripleMapFragment.refers_to_mapping_package_ids]
+ ]
+
return GenericTripleMapFragmentOut(**(
- await generic_triple_map_fragment.set(request_update_data(data, user=user))
+ await generic_triple_map_fragment.set(update_data)
).model_dump())
diff --git a/mapping_workbench/backend/triple_map_fragment/services/data_for_generic.py b/mapping_workbench/backend/triple_map_fragment/services/data_for_generic.py
index a9b4f2549..b24e0151a 100644
--- a/mapping_workbench/backend/triple_map_fragment/services/data_for_generic.py
+++ b/mapping_workbench/backend/triple_map_fragment/services/data_for_generic.py
@@ -1,25 +1,19 @@
from typing import List
from beanie import PydanticObjectId
+from beanie.odm.operators.find.comparison import Eq
from mapping_workbench.backend.project.models.entity import Project
-from mapping_workbench.backend.triple_map_fragment.models.entity import GenericTripleMapFragment, \
- SpecificTripleMapFragment
+from mapping_workbench.backend.triple_map_fragment.models.entity import GenericTripleMapFragment
-async def get_generic_triple_map_fragments_for_project(project_id: PydanticObjectId) -> \
- List[GenericTripleMapFragment]:
+async def get_generic_triple_map_fragments_for_project_package(
+ project_id: PydanticObjectId,
+ package_id: PydanticObjectId
+) -> List[GenericTripleMapFragment]:
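+ # Eq on the list field matches fragments whose refers_to_mapping_package_ids contains the given package_id.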
items: List[GenericTripleMapFragment] = await GenericTripleMapFragment.find(
- GenericTripleMapFragment.project == Project.link_from_id(project_id)
- ).to_list()
-
- return items
-
-
-async def get_specific_triple_map_fragments_for_package(package_id: PydanticObjectId) -> \
- List[SpecificTripleMapFragment]:
- items: List[SpecificTripleMapFragment] = await SpecificTripleMapFragment.find(
- SpecificTripleMapFragment.mapping_package_id == package_id
+ GenericTripleMapFragment.project == Project.link_from_id(project_id),
+ Eq(GenericTripleMapFragment.refers_to_mapping_package_ids, package_id)
).to_list()
return items
diff --git a/mapping_workbench/backend/triple_map_fragment/services/data_for_specific.py b/mapping_workbench/backend/triple_map_fragment/services/data_for_specific.py
new file mode 100644
index 000000000..2650dee88
--- /dev/null
+++ b/mapping_workbench/backend/triple_map_fragment/services/data_for_specific.py
@@ -0,0 +1,16 @@
+from typing import List
+
+from beanie import PydanticObjectId
+
+from mapping_workbench.backend.project.models.entity import Project
+from mapping_workbench.backend.triple_map_fragment.models.entity import SpecificTripleMapFragment
+
+
+async def get_specific_triple_map_fragments_for_package(project_id: PydanticObjectId, package_id: PydanticObjectId) -> \
+ List[SpecificTripleMapFragment]:
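+ # Specific fragments keep a direct mapping_package_id, so filter by project and package id.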
+ items: List[SpecificTripleMapFragment] = await SpecificTripleMapFragment.find(
+ SpecificTripleMapFragment.project == Project.link_from_id(project_id),
+ SpecificTripleMapFragment.mapping_package_id == package_id
+ ).to_list()
+
+ return items
diff --git a/mapping_workbench/frontend/src/api/mapping-packages/index.js b/mapping_workbench/frontend/src/api/mapping-packages/index.js
index 7be30b21b..b1fce821a 100644
--- a/mapping_workbench/frontend/src/api/mapping-packages/index.js
+++ b/mapping_workbench/frontend/src/api/mapping-packages/index.js
@@ -3,6 +3,7 @@ import {appApi} from "../app";
import {shaclTestSuitesApi} from "../shacl-test-suites";
import {sparqlTestSuitesApi} from "../sparql-test-suites";
import {resourceCollectionsApi} from "../resource-collections";
+import {testDataSuitesApi} from "../test-data-suites";
export const PACKAGE_TYPE = {
EFORMS: 'eForms', STANDARD: 'Standard'
@@ -37,6 +38,7 @@ class MappingPackagesApi extends SectionApi {
id: mappingPackage._id, title: mappingPackage.title, identifier: mappingPackage.identifier
}
if (full) {
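+ // Expose the package's linked test data suites alongside the other suite and resource link fields.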
+ data[testDataSuitesApi.MAPPING_PACKAGE_LINK_FIELD] = mappingPackage[testDataSuitesApi.MAPPING_PACKAGE_LINK_FIELD];
data[shaclTestSuitesApi.MAPPING_PACKAGE_LINK_FIELD] = mappingPackage[shaclTestSuitesApi.MAPPING_PACKAGE_LINK_FIELD];
data[sparqlTestSuitesApi.MAPPING_PACKAGE_LINK_FIELD] = mappingPackage[sparqlTestSuitesApi.MAPPING_PACKAGE_LINK_FIELD];
data[resourceCollectionsApi.MAPPING_PACKAGE_LINK_FIELD] = mappingPackage[resourceCollectionsApi.MAPPING_PACKAGE_LINK_FIELD];
diff --git a/mapping_workbench/frontend/src/api/test-data-suites/index.js b/mapping_workbench/frontend/src/api/test-data-suites/index.js
index 76f287413..15757e1d9 100644
--- a/mapping_workbench/frontend/src/api/test-data-suites/index.js
+++ b/mapping_workbench/frontend/src/api/test-data-suites/index.js
@@ -35,8 +35,7 @@ class TestDataSuitesApi extends FileCollectionsApi {
constructor() {
super("test_data_suites");
this.isProjectResource = true;
- this.hasMappingPackage = true;
- this.isMappingPackageRequired = this.hasMappingPackage && false;
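+ // Test data suites now refer to mapping packages (many-to-many) instead of holding a single mapping_package_id.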
+ this.refersToMappingPackages = true;
}
async getValuesForSelector(request = {}) {
diff --git a/mapping_workbench/frontend/src/hooks/use-items-store.js b/mapping_workbench/frontend/src/hooks/use-items-store.js
index afe2e227b..f447c71ff 100644
--- a/mapping_workbench/frontend/src/hooks/use-items-store.js
+++ b/mapping_workbench/frontend/src/hooks/use-items-store.js
@@ -20,6 +20,7 @@ export const useItemsStore = (sectionApi) => {
[]);
return {
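+ // Expose the fetcher so callers can force a refresh after mutations.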
+ handleItemsGet,
...state
};
};
\ No newline at end of file
diff --git a/mapping_workbench/frontend/src/pages/app/conceptual-mapping-rules/overview/index.js b/mapping_workbench/frontend/src/pages/app/conceptual-mapping-rules/overview/index.js
index 030c59050..61203ec67 100644
--- a/mapping_workbench/frontend/src/pages/app/conceptual-mapping-rules/overview/index.js
+++ b/mapping_workbench/frontend/src/pages/app/conceptual-mapping-rules/overview/index.js
@@ -23,7 +23,6 @@ import FormControlLabel from '@mui/material/FormControlLabel';
import {paths} from 'src/paths';
import {Seo} from 'src/components/seo';
-import {sessionApi} from 'src/api/session';
import {tokens} from "/src/locales/tokens";
import {useDialog} from 'src/hooks/use-dialog';
import {usePageView} from 'src/hooks/use-page-view';
diff --git a/mapping_workbench/frontend/src/pages/app/test-data-suites/index.js b/mapping_workbench/frontend/src/pages/app/test-data-suites/index.js
index 3854da553..97148c068 100644
--- a/mapping_workbench/frontend/src/pages/app/test-data-suites/index.js
+++ b/mapping_workbench/frontend/src/pages/app/test-data-suites/index.js
@@ -17,12 +17,42 @@ import {Layout as AppLayout} from 'src/layouts/app';
import {usePageView} from 'src/hooks/use-page-view';
import {RouterLink} from 'src/components/router-link';
import useItemsSearch from 'src/hooks/use-items-search';
-import {useItemsStore} from 'src/hooks/use-items-store';
import {TableSearchBar} from "src/sections/components/table-search-bar";
import {testDataSuitesApi as sectionApi} from 'src/api/test-data-suites';
import {BreadcrumbsSeparator} from 'src/components/breadcrumbs-separator';
import {FileCollectionUploader} from "src/sections/app/file-manager/file-collection-uploader";
import {TestDataCollectionListTable} from "src/sections/app/file-manager/test-data-collection-list-table";
+import {useEffect, useState} from "react";
+
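+// Page-local items store: besides items and count it tracks a "force" stamp so the table can refetch mapping-package links after bulk assignment.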
+const useItemsStore = () => {
+ const [state, setState] = useState({
+ items: [],
+ itemsCount: 0,
+ force: 0
+ });
+
+ const handleItemsGet = (force = 0) => {
+ sectionApi.getItems()
+ .then(res =>
+ setState({
+ items: res.items,
+ itemsCount: res.count,
+ force: force
+ }))
+ .catch(err => console.error(err))
+ }
+
+ useEffect(() => {
+ handleItemsGet();
+ },
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ []);
+
+ return {
+ handleItemsGet,
+ ...state
+ };
+};
const Page = () => {
const uploadDialog = useDialog()
@@ -107,6 +137,7 @@ const Page = () => {
onSort={itemsSearch.handleSort}
page={itemsSearch.state.page}
items={itemsSearch.pagedItems}
+ itemsForced={itemsStore.force}
count={itemsStore.itemsCount}
rowsPerPage={itemsSearch.state.rowsPerPage}
sectionApi={sectionApi}
diff --git a/mapping_workbench/frontend/src/pages/app/triple-map-fragments/[id]/edit.js b/mapping_workbench/frontend/src/pages/app/triple-map-fragments/[id]/edit.js
index 5bba65815..5f3e6639a 100644
--- a/mapping_workbench/frontend/src/pages/app/triple-map-fragments/[id]/edit.js
+++ b/mapping_workbench/frontend/src/pages/app/triple-map-fragments/[id]/edit.js
@@ -1,5 +1,5 @@
import {useEffect, useState} from "react";
-import {specificTripleMapFragmentsApi as sectionApi} from 'src/api/triple-map-fragments/specific';
+import {genericTripleMapFragmentsApi as sectionApi} from 'src/api/triple-map-fragments/generic';
import ArrowLeftIcon from '@untitled-ui/icons-react/build/esm/ArrowLeft';
import Link from '@mui/material/Link';
diff --git a/mapping_workbench/frontend/src/pages/app/triple-map-fragments/[id]/view.js b/mapping_workbench/frontend/src/pages/app/triple-map-fragments/[id]/view.js
index 452599849..6eb926930 100644
--- a/mapping_workbench/frontend/src/pages/app/triple-map-fragments/[id]/view.js
+++ b/mapping_workbench/frontend/src/pages/app/triple-map-fragments/[id]/view.js
@@ -1,8 +1,6 @@
import {useCallback, useState} from 'react';
import ArrowLeftIcon from '@untitled-ui/icons-react/build/esm/ArrowLeft';
-import Box from '@mui/material/Box';
import Chip from '@mui/material/Chip';
-import Container from '@mui/material/Container';
import Divider from '@mui/material/Divider';
import Grid from '@mui/material/Unstable_Grid2';
import Link from '@mui/material/Link';
@@ -12,7 +10,7 @@ import Tab from '@mui/material/Tab';
import Tabs from '@mui/material/Tabs';
import Typography from '@mui/material/Typography';
-import {specificTripleMapFragmentsApi as sectionApi} from 'src/api/triple-map-fragments/specific';
+import {genericTripleMapFragmentsApi as sectionApi} from 'src/api/triple-map-fragments/generic';
import {RouterLink} from 'src/components/router-link';
import {Seo} from 'src/components/seo';
import {usePageView} from 'src/hooks/use-page-view';
diff --git a/mapping_workbench/frontend/src/pages/app/triple-map-fragments/create.js b/mapping_workbench/frontend/src/pages/app/triple-map-fragments/create.js
index 2b0a61a66..2c77d127e 100644
--- a/mapping_workbench/frontend/src/pages/app/triple-map-fragments/create.js
+++ b/mapping_workbench/frontend/src/pages/app/triple-map-fragments/create.js
@@ -4,7 +4,7 @@ import Stack from '@mui/material/Stack';
import SvgIcon from '@mui/material/SvgIcon';
import Typography from '@mui/material/Typography';
-import {specificTripleMapFragmentsApi as sectionApi} from 'src/api/triple-map-fragments/specific';
+import {genericTripleMapFragmentsApi as sectionApi} from 'src/api/triple-map-fragments/generic';
import {RouterLink} from 'src/components/router-link';
import {Seo} from 'src/components/seo';
import {usePageView} from 'src/hooks/use-page-view';
diff --git a/mapping_workbench/frontend/src/pages/app/triple-map-fragments/index.js b/mapping_workbench/frontend/src/pages/app/triple-map-fragments/index.js
index d5752da58..290cf56d1 100644
--- a/mapping_workbench/frontend/src/pages/app/triple-map-fragments/index.js
+++ b/mapping_workbench/frontend/src/pages/app/triple-map-fragments/index.js
@@ -1,4 +1,3 @@
-import {useEffect, useState} from 'react';
import {useFormik} from "formik";
import * as Yup from "yup";
@@ -26,7 +25,7 @@ import {TableSearchBar} from "src/sections/components/table-search-bar";
import {BreadcrumbsSeparator} from 'src/components/breadcrumbs-separator';
import {ListTable} from "src/sections/app/generic-triple-map-fragment/list-table";
import {FileUploader} from "src/sections/app/generic-triple-map-fragment/file-uploader";
-import {specificTripleMapFragmentsApi as sectionApi} from 'src/api/triple-map-fragments/specific';
+import {genericTripleMapFragmentsApi as sectionApi} from 'src/api/triple-map-fragments/generic';
const Page = () => {
const uploadDialog = useDialog();
diff --git a/mapping_workbench/frontend/src/sections/app/file-manager/file-collection-list-table.js b/mapping_workbench/frontend/src/sections/app/file-manager/file-collection-list-table.js
index 2c228b863..e7f6225f6 100644
--- a/mapping_workbench/frontend/src/sections/app/file-manager/file-collection-list-table.js
+++ b/mapping_workbench/frontend/src/sections/app/file-manager/file-collection-list-table.js
@@ -255,7 +255,7 @@ export const FileCollectionListTable = (props) => {
page = 0,
rowsPerPage = 0,
sectionApi,
- getItems = () => {
+ getItems = (number) => {
},
selectable = null,
fileResourceApi,
@@ -292,17 +292,6 @@ export const FileCollectionListTable = (props) => {
.catch(err => console.error(err))
}, [itemsForced])
- const [projectMappingPackagesMap, setProjectMappingPackagesMap] = useState({});
-
- useEffect(() => {
- (() => {
- setProjectMappingPackagesMap(projectMappingPackages.reduce((a, b) => {
- a[b['id']] = b['title'];
- return a
- }, {}));
- })()
- }, [projectMappingPackages])
-
const onMappingPackagesAssign = () => {
getItems(Date.now());
}
@@ -369,7 +358,6 @@ export const FileCollectionListTable = (props) => {
isItemSelected={isItemSelected}
sectionApi={sectionApi}
router={router}
- projectMappingPackagesMap={projectMappingPackagesMap}
projectMappingPackages={projectMappingPackages}
selectable={selectable}
/>
diff --git a/mapping_workbench/frontend/src/sections/app/file-manager/test-data-collection-list-table.js b/mapping_workbench/frontend/src/sections/app/file-manager/test-data-collection-list-table.js
index 1776c3996..25e461a11 100644
--- a/mapping_workbench/frontend/src/sections/app/file-manager/test-data-collection-list-table.js
+++ b/mapping_workbench/frontend/src/sections/app/file-manager/test-data-collection-list-table.js
@@ -37,6 +37,8 @@ import {ListItemActions} from "src/components/app/list/list-item-actions";
import {ForListItemAction} from 'src/contexts/app/section/for-list-item-action';
import {testDataFileResourcesApi as fileResourcesApi} from "src/api/test-data-suites/file-resources";
import {MappingPackagesBulkAssigner} from "src/sections/app/mapping-package/components/mapping-packages-bulk-assigner";
+import ListItem from "@mui/material/ListItem";
+import List from "@mui/material/List";
export const ListTableRow = (props) => {
@@ -50,7 +52,7 @@ export const ListTableRow = (props) => {
sectionApi,
router,
getItems,
- projectMappingPackagesMap
+ projectMappingPackages
} = props;
const {timeSetting} = useGlobalState()
@@ -58,13 +60,12 @@ export const ListTableRow = (props) => {
const [confirmOpen, setConfirmOpen] = useState(false);
const uploadDialog = useDialog()
-
useEffect(() => {
getFileResources()
}, [])
const getFileResources = () => {
- sectionApi.getFileResources(item_id)
+ sectionApi.getFileResources(item_id, {rowsPerPage: -1})
.then(res => setCollectionResources(res.items))
}
@@ -144,7 +145,26 @@ export const ListTableRow = (props) => {
- {item.mapping_package_id && projectMappingPackagesMap[item.mapping_package_id]}
+
+ {
+ sectionApi.MAPPING_PACKAGE_LINK_FIELD
+ && projectMappingPackages
+ .filter(
+ projectMappingPackage => projectMappingPackage?.[sectionApi.MAPPING_PACKAGE_LINK_FIELD]
+ ?.some(resource_ref => item_id === resource_ref.id)
+ )
+ .map((mapping_package) => {
+ return (
+ <ListItem key={mapping_package.id}>
+ {mapping_package['title']}
+ </ListItem>
+ );
+ })}
+
{timeTransformer(item.created_at, timeSetting)}
@@ -268,13 +288,14 @@ export const TestDataCollectionListTable = (props) => {
const {
count = 0,
items = [],
+ itemsForced = 0,
onPageChange = () => {
},
onRowsPerPageChange,
page = 0,
rowsPerPage = 0,
sectionApi,
- getItems = () => {
+ getItems = (number) => {
}
} = props;
@@ -306,24 +327,13 @@ export const TestDataCollectionListTable = (props) => {
const [projectMappingPackages, setProjectMappingPackages] = useState([]);
useEffect(() => {
- (async () => {
- setProjectMappingPackages(await mappingPackagesApi.getProjectPackages());
- })()
- }, [])
-
- const [projectMappingPackagesMap, setProjectMappingPackagesMap] = useState({});
-
- useEffect(() => {
- (() => {
- setProjectMappingPackagesMap(projectMappingPackages.reduce((a, b) => {
- a[b['id']] = b['title'];
- return a
- }, {}));
- })()
- }, [projectMappingPackages])
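+ // Reload project packages (with their link fields) whenever itemsForced changes, e.g. after a bulk assignment.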
+ mappingPackagesApi.getProjectPackages(true)
+ .then(res => setProjectMappingPackages(res))
+ .catch(err => console.error(err))
+ }, [itemsForced])
const onMappingPackagesAssign = () => {
- getItems()
+ getItems(Date.now())
}
return (<>
@@ -357,7 +367,7 @@ export const TestDataCollectionListTable = (props) => {
Title
- Package
+ Packages
Created
@@ -383,7 +393,7 @@ export const TestDataCollectionListTable = (props) => {
sectionApi={sectionApi}
router={router}
getItems={getItems}
- projectMappingPackagesMap={projectMappingPackagesMap}
+ projectMappingPackages={projectMappingPackages}
/>
)
})}
@@ -398,6 +408,7 @@ export const TestDataCollectionListTable = (props) => {
TestDataCollectionListTable.propTypes = {
count: PropTypes.number,
items: PropTypes.array,
+ itemsForced: PropTypes.number,
onPageChange: PropTypes.func,
onRowsPerPageChange: PropTypes.func,
page: PropTypes.number,
@@ -417,5 +428,5 @@ ListTableRow.propTypes = {
sectionApi: PropTypes.object,
router: PropTypes.object,
getItems: PropTypes.func,
- projectMappingPackagesMap: PropTypes.object
+ projectMappingPackages: PropTypes.array
}
\ No newline at end of file
diff --git a/mapping_workbench/frontend/src/sections/app/generic-triple-map-fragment/edit-form.js b/mapping_workbench/frontend/src/sections/app/generic-triple-map-fragment/edit-form.js
index a9f3ec3d2..49f4eb1eb 100644
--- a/mapping_workbench/frontend/src/sections/app/generic-triple-map-fragment/edit-form.js
+++ b/mapping_workbench/frontend/src/sections/app/generic-triple-map-fragment/edit-form.js
@@ -34,6 +34,8 @@ import CodeMirrorDefault from "src/components/app/form/codeMirrorDefault";
import {FormCodeReadOnlyArea} from "src/components/app/form/code-read-only-area";
import {toastError, toastLoad, toastSuccess, toastWarning} from "src/components/app-toast";
import {MappingPackageFormSelect} from "../mapping-package/components/mapping-package-form-select";
+import {MappingPackageCheckboxList} from "../mapping-package/components/mapping-package-checkbox-list";
+import Divider from "@mui/material/Divider";
export const EditForm = (props) => {
@@ -55,7 +57,7 @@ export const EditForm = (props) => {
const initialValues = {
triple_map_uri: item.triple_map_uri ?? '',
triple_map_content: item.triple_map_content ?? '',
- mapping_package_id: item.mapping_package_id ?? '',
+ refers_to_mapping_package_ids: item.refers_to_mapping_package_ids ?? [],
format: item.format ?? sectionApi.FILE_RESOURCE_DEFAULT_FORMAT ?? '',
};
@@ -72,16 +74,11 @@ export const EditForm = (props) => {
.string()
.max(255)
.required('Format is required'),
- mapping_package_id:
- Yup
- .string()
- .required('Mapping Package is required'),
}),
onSubmit: async (values, helpers) => {
const toastId = toastLoad("Updating...")
try {
let response;
- if (!values['mapping_package_id']) values['mapping_package_id'] = null;
values['project'] = sessionApi.getSessionProject();
if (itemctx.isNew) {
response = await sectionApi.createItem(values);
@@ -125,7 +122,6 @@ export const EditForm = (props) => {
}
const onUpdateAndTransform = (values, helpers) => {
-
values['project'] = sessionApi.getSessionProject();
if (!values['mapping_package_id']) values['mapping_package_id'] = null;
values['id'] = item._id;
@@ -230,13 +226,17 @@ export const EditForm = (props) => {
))}
-
-
+
+
+
+
+
+
+
+
{
value=""
/>
-
-
-
{
}
+
+
+
diff --git a/mapping_workbench/frontend/src/sections/app/generic-triple-map-fragment/list-table.js b/mapping_workbench/frontend/src/sections/app/generic-triple-map-fragment/list-table.js
index cf340c65c..58413c1ed 100644
--- a/mapping_workbench/frontend/src/sections/app/generic-triple-map-fragment/list-table.js
+++ b/mapping_workbench/frontend/src/sections/app/generic-triple-map-fragment/list-table.js
@@ -1,7 +1,5 @@
import {Fragment, useEffect, useState} from 'react';
import PropTypes from 'prop-types';
-import {turtle} from 'codemirror-lang-turtle';
-import {yaml} from '@codemirror/lang-yaml';
import EditIcon from '@untitled-ui/icons-react/build/esm/Edit05';
import ChevronRightIcon from '@mui/icons-material/ChevronRight';
@@ -14,7 +12,6 @@ import Stack from '@mui/material/Stack';
import Button from '@mui/material/Button';
import Dialog from '@mui/material/Dialog';
import SvgIcon from '@mui/material/SvgIcon';
-import {useTheme} from "@mui/material/styles";
import TableRow from '@mui/material/TableRow';
import TableHead from '@mui/material/TableHead';
import TableBody from '@mui/material/TableBody';
@@ -33,7 +30,7 @@ import {mappingPackagesApi} from "src/api/mapping-packages";
import TablePagination from "src/sections/components/table-pagination";
import CodeMirrorDefault from 'src/components/app/form/codeMirrorDefault';
import {ListItemActions} from 'src/components/app/list/list-item-actions';
-import {conceptualMappingRulesApi} from 'src/api/conceptual-mapping-rules';
+import {genericTripleMapFragmentsApi} from 'src/api/triple-map-fragments/generic';
import TableSorterHeader from "src/sections/components/table-sorter-header";
import {ForListItemAction} from 'src/contexts/app/section/for-list-item-action';
import {
@@ -45,15 +42,15 @@ export const ListTableMappingPackages = (props) => {
const {
item,
initProjectMappingPackages = null,
- onPackagesUpdate = () => { },
+ onPackagesUpdate = () => {
+ },
isCurrent,
isHovered,
- ruleFilteredMappingPackages
+ itemFilteredMappingPackages
} = props;
-
- const [mappingPackages, setMappingPackages] = useState(ruleFilteredMappingPackages);
+ const [mappingPackages, setMappingPackages] = useState(itemFilteredMappingPackages);
const [projectMappingPackages, setProjectMappingPackages] = useState(initProjectMappingPackages ?? []);
- const [tempMappingPackages, setTempMappingPackages] = useState(ruleFilteredMappingPackages);
+ const [tempMappingPackages, setTempMappingPackages] = useState(itemFilteredMappingPackages);
const mappingPackagesDialog = useDialog();
@@ -62,28 +59,31 @@ export const ListTableMappingPackages = (props) => {
values['id'] = item._id;
values['project'] = sessionApi.getSessionProject();
values['refers_to_mapping_package_ids'] = tempMappingPackages;
- conceptualMappingRulesApi.updateItem(values)
+ genericTripleMapFragmentsApi.updateItem(values)
.then(res => {
setMappingPackages(tempMappingPackages);
item.refers_to_mapping_package_ids = tempMappingPackages;
- toastSuccess(conceptualMappingRulesApi.SECTION_ITEM_TITLE + ' updated');
+ toastSuccess(genericTripleMapFragmentsApi.SECTION_ITEM_TITLE + ' updated');
mappingPackagesDialog.handleClose();
onPackagesUpdate()
})
.catch(err => console.error(err))
};
- const ruleMappingPackages = projectMappingPackages.filter(x => mappingPackages.includes(x.id))
-
+ const itemMappingPackages = projectMappingPackages.filter(x => mappingPackages.includes(x.id))
const mappingPackagesDialogHandleClose = () => {
mappingPackagesDialog.handleClose();
- setTempMappingPackages(ruleFilteredMappingPackages);
+ setTempMappingPackages(itemFilteredMappingPackages);
}
return (<>
- {!!ruleMappingPackages.length &&
- {ruleMappingPackages.map(x => )}
+ {!!itemMappingPackages.length &&
+ {itemMappingPackages.map(
+ x =>
+
+
+
+ )}
}
{isHovered &&
- Package
+ Packages
{
onMouseLeave={() => setHoveredItem(null)}>
{projectMappingPackagesMap && {
const formik = useFormik({
initialValues: {
- use_latest_package_state: false,
+ use_only_package_state: false,
transform_test_data: true,
generate_cm_assertions: true,
validate_package: true,
@@ -79,7 +79,7 @@ const PackageRow = ({item, sectionApi}) => {
const data = {
package_id: item._id,
project_id: sessionApi.getSessionProject(),
- use_latest_package_state: values['use_latest_package_state']
+ use_only_package_state: values['use_only_package_state']
}
if (tasks_to_run.length > 0) {
data.tasks_to_run = tasks_to_run.join(',');
@@ -92,6 +92,10 @@ const PackageRow = ({item, sectionApi}) => {
}
});
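+ // Processing tasks are configurable only while "Use only the Package State" is unchecked.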
+ const processTasksEnabled = () => {
+ return !formik.values.use_only_package_state;
+ }
+
const handleExport = itemId => {
setIsExporting(true)
const data = {
@@ -171,12 +175,11 @@ const PackageRow = ({item, sectionApi}) => {
}}
control={
formik.setFieldValue('use_latest_package_state', event.target.checked)}
+ checked={formik.values.use_only_package_state}
+ onChange={(event) => formik.setFieldValue('use_only_package_state', event.target.checked)}
/>
}
- label="Use latest Package State"
+ label="Use only the Package State"
/>
Processing a Mapping Package includes:
@@ -188,7 +191,8 @@ const PackageRow = ({item, sectionApi}) => {
}}
control={
formik.setFieldValue('transform_test_data', event.target.checked)}
/>
}
@@ -202,7 +206,8 @@ const PackageRow = ({item, sectionApi}) => {
}}
control={
formik.setFieldValue('generate_cm_assertions', event.target.checked)}
/>
}
@@ -216,7 +221,8 @@ const PackageRow = ({item, sectionApi}) => {
}}
control={
formik.setFieldValue('validate_package', event.target.checked)}
/>
}
@@ -230,8 +236,8 @@ const PackageRow = ({item, sectionApi}) => {
}}
control={
formik.setFieldValue('validate_package_shacl', event.target.checked)}
/>
}
@@ -245,8 +251,8 @@ const PackageRow = ({item, sectionApi}) => {
}}
control={
formik.setFieldValue('validate_package_xpath_sparql', event.target.checked)}
/>
}
diff --git a/mapping_workbench/frontend/src/sections/app/mapping-package/resources.js b/mapping_workbench/frontend/src/sections/app/mapping-package/resources.js
index 5102a4326..518e9a2e7 100644
--- a/mapping_workbench/frontend/src/sections/app/mapping-package/resources.js
+++ b/mapping_workbench/frontend/src/sections/app/mapping-package/resources.js
@@ -16,7 +16,8 @@ const Resources = ({item}) => {
0
+ && item.test_data_suites.map(x => x.id)) || ''
}}
/>
diff --git a/tests/e2e/backend/api/test_data_suite/test_routes.py b/tests/e2e/backend/api/test_data_suite/test_routes.py
index 287591964..4ea9cfdc3 100644
--- a/tests/e2e/backend/api/test_data_suite/test_routes.py
+++ b/tests/e2e/backend/api/test_data_suite/test_routes.py
@@ -30,7 +30,7 @@ async def test_test_data_suites_assign_mapping_packages_route(
assert response.status_code == 200
test_data_suite = await TestDataSuite.get(dummy_test_data_suite.id)
- assert test_data_suite.mapping_package_id == dummy_mapping_package.id
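+ # The suite now records its packages in refers_to_mapping_package_ids.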
+ assert dummy_mapping_package.id in test_data_suite.refers_to_mapping_package_ids
await dummy_test_data_suite.delete()
assert not await TestDataSuite.get(dummy_test_data_suite.id)