diff --git a/docs/auth.md b/docs/auth.md index 69f234a9..e27e8317 100644 --- a/docs/auth.md +++ b/docs/auth.md @@ -1,6 +1,4 @@ -# Authentication and authorization - -## Authentication +# Authentication `py-ispyb` relies on plugins to handle different methods of authenticating users to the system. There are some mechanisms that are implemented natively like LDAP, keycloak and dummy that can be used out-of-the-box. However, it is worth noting that anyone can write his own plugin. @@ -189,33 +187,3 @@ class MyAuthentication(AbstractAuthentication): ... ``` - -### Authorization dependencies - -The following dependencies can be used to manage authentication and authorization rules. - -#### `permission_required(operator, [permissions])` - -Makes the route only accessible to users with the **specified permissions**. - -- `operator` is either - - `"any"` User should have **any** of the specified permissions - - `"all"` User should have **all** of the specified permissions - -#### `proposal_authorisation` - -Verifies that the user is **associated to the requested proposal**. To do so, it uses the `proposal_id` parameter. -User must verify any of the following conditions : - -- `Person.personId = Proposal.personId` -- `Person.personId = ProposalHasPerson.personId and ProposalHasPerson.proposalId = Proposal.proposalId` -- _has permission_ `all_proposals` - -#### `session_authorisation` - -Verifies that the user is **associated to the requested session**. To do so, it uses the `session_id` parameter. 
-User must verify any of the following conditions : - -- `Person.personId = Session_has_Person.personId and Session_has_Person.sessionId = BLSession.sessionId` -- `BLSession.proposalId = Proposal.proposalId and Person.personId = Proposal.personId` -- _has permission_ `all_sessions` diff --git a/docs/authorization.md b/docs/authorization.md new file mode 100644 index 00000000..0c6d30d5 --- /dev/null +++ b/docs/authorization.md @@ -0,0 +1,92 @@ +# Proposal(s), Session(s), and related entities + +Authorization is applied to all user facing resources in py-ISPyB and different permissions are available to grant users and staff access to entities related to the core of ISPyB. These include but not limited to: + +- Proposal +- Protein, Crystal, BLSample, Shipping, LabContact +- BLSession, DataCollectionGroup, DataCollection + +etc ... + +The authorization rules are applied in four ways: + +### Users + +- A user can access entities related to a Proposal and the DataCollection(s) in which they are a member of one or more Session(s) [linked via SessionHasPerson]. _This is an intrinsic permission and is the default behaviour if the user has no other permissions._ +- A user can access entities related to all Session(s) in a Proposal [linked via ProposalHasPerson] + +### Administrators + +- An administrator can view all Sessions on a Proposal for specific beamline(s) via a `BeamLineGroup` permission +- An administrator can access all Sessions and Proposals via `all_proposals` + +## BeamLineGroups + +Beamline groups provide a way to grant access to all Proposals, Sessions and related entities to a set of staff members for a particular group of beamlines. 
+ +For example: + +```json +"beamLineGroups": [ + { + "groupName": "BL0x", + "uiGroup": "mx", + "permission": "bl0_admin", + "beamlines": [ + {"beamLineName": "BL01"}, + {"beamLineName": "BL02"}, + ], + }, +] +``` + +A staff member with the `bl0_admin` permission will be able to access Proposal(s) and Session(s) allocated on beamlines `BL01` and `BL02`, but not other beamlines. `uiGroup` specifies how this group should be rendered in the UI. + +# Permissions + +Routes can require a specific permission by using the `permission` dependency. + +```python +from pyispyb.dependencies import permission + + +@router.get( + "/path", +) +def get_something(depends: bool = Depends(permission("my_permission"))): + ... +``` + +# Deprecated Authorization Mechanisms + +These functions are deprecated and currently only used in the legacy API resources. They should not be used for new developments. + +## Authorization dependencies + +The following decorators can be used to manage authentication and authorization rules. + +### `permission_required(operator, [permissions])` + +Makes the route only accessible to users with the **specified permissions**. + +- `operator` is either + - `"any"` User should have **any** of the specified permissions + - `"all"` User should have **all** of the specified permissions + +### `proposal_authorisation` + +Verifies that the user is **associated to the requested proposal**. To do so, it uses the `proposal_id` parameter. +User must verify any of the following conditions : + +- `Person.personId = Proposal.personId` +- `Person.personId = ProposalHasPerson.personId and ProposalHasPerson.proposalId = Proposal.proposalId` +- _has permission_ `all_proposals` + +### `session_authorisation` + +Verifies that the user is **associated to the requested session**. To do so, it uses the `session_id` parameter. 
+User must verify any of the following conditions : + +- `Person.personId = Session_has_Person.personId and Session_has_Person.sessionId = BLSession.sessionId` +- `BLSession.proposalId = Proposal.proposalId and Person.personId = Proposal.personId` +- _has permission_ `all_sessions` diff --git a/mkdocs.yml b/mkdocs.yml index 8dd4dd57..53b2ceab 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,11 +1,12 @@ site_name: py-ISPyB nav: - Home: index.md - - Get started: run.md + - Get Started: run.md - Tests: tests.md - Configuration: conf.md - - Authentication and authorization: - - Basics: auth.md + - Authentication and Authorization: + - Authentication: auth.md + - Authorization: authorization.md - Permissions: permissions.md - Routes: - About: routes.md diff --git a/pyispyb/app/extensions/database/definitions.py b/pyispyb/app/extensions/database/definitions.py index 77ebfe55..16633a17 100644 --- a/pyispyb/app/extensions/database/definitions.py +++ b/pyispyb/app/extensions/database/definitions.py @@ -1,12 +1,16 @@ +import logging from typing import Optional, Any import sqlalchemy from sqlalchemy.orm import joinedload from ispyb import models +from pyispyb.app.extensions.options.schema import Options from pyispyb.app.globals import g from pyispyb.app.extensions.database.middleware import db +logger = logging.getLogger(__name__) + _session = sqlalchemy.func.concat( models.Proposal.proposalCode, models.Proposal.proposalNumber, @@ -19,38 +23,6 @@ ).label("proposal") -def get_blsession(session: str) -> Optional[models.BLSession]: - return ( - db.session.query(models.BLSession) - .join(models.Proposal) - .filter(_session == session) - .first() - ) - - -def with_auth_to_session( - query: "sqlalchemy.orm.Query[Any]", column: "sqlalchemy.Column[Any]" -) -> "sqlalchemy.orm.Query[Any]": - """Join relevant tables to authorise right through to SessionHasPerson - - in case of not being admin, can be reused""" - return ( - query.join(models.Proposal, column == models.Proposal.proposalId) 
- .join( - models.BLSession, models.BLSession.proposalId == models.Proposal.proposalId - ) - .join( - models.SessionHasPerson, - models.BLSession.sessionId == models.SessionHasPerson.sessionId, - ) - .join( - models.Person, - models.SessionHasPerson.personId == models.Person.personId, - ) - .filter(models.Person.login == g.login) - ) - - def get_current_person(login: str) -> Optional[models.Person]: person = ( db.session.query(models.Person) @@ -70,3 +42,107 @@ def get_current_person(login: str) -> Optional[models.Person]: person._metadata["permissions"] = permissions return person + + +def get_options() -> Options: + """Get db_options from app""" + # Avoid circular import + from pyispyb.app.main import app + + return app.db_options + + +def with_authorization( + query: "sqlalchemy.orm.Query[Any]", + includeArchived: bool = False, + proposalColumn: "sqlalchemy.Column[Any]" = None, + joinBLSession: bool = True, +) -> "sqlalchemy.orm.Query[Any]": + """Apply authorization to a query + + Checks in the following order: + * `all_proposals` allowing access to everything + * checks if the user is in a beamLineGroup to allow access to all proposals on a beamline + * checks ProposalHasPerson + * falls back to SessionHasPerson allowing access to entities related to where the + user is registered on a session + + Kwargs: + includeArchived: whether to exclude archived beamlines + proposalColumn: the column used to join to `models.Proposal`, will force a join with `models.Proposal` + joinBLSession: whether to join `models.BLSession` + joinSessionHasPerson: whether to join `models.SessionHasPerson` + """ + # `all_proposals`` can access all sessions + if "all_proposals" in g.permissions: + logger.info("user has `all_proposals`") + return query + + # Iterate through users permissions and match them to the relevant groups + beamLines = [] + permissions_applied = [] + db_options = get_options() + for group in db_options.beamLineGroups: + if group.permission in g.permissions: + 
permissions_applied.append(group.permission) + for beamLine in group.beamLines: + if (beamLine.archived and includeArchived) or not includeArchived: + beamLines.append(beamLine.beamLineName) + + if proposalColumn: + query = query.join( + models.Proposal, models.Proposal.proposalId == proposalColumn + ) + + if joinBLSession: + query = query.outerjoin( + models.BLSession, models.BLSession.proposalId == models.Proposal.proposalId + ) + + conditions = [] + if beamLines: + logger.info( + f"filtered to beamlines `{beamLines}` with permissions `{permissions_applied}`" + ) + + conditions.append(models.BLSession.beamLineName.in_(beamLines)) + + # Sessions + sessions = db.session.query(models.SessionHasPerson.sessionId).filter( + models.SessionHasPerson.personId == g.personId + ) + sessions = [r._asdict()["sessionId"] for r in sessions.all()] + conditions.append(models.BLSession.sessionId.in_(sessions if sessions else [])) + + # Proposals + proposals = db.session.query(models.ProposalHasPerson.proposalId).filter( + models.ProposalHasPerson.personId == g.personId + ) + proposals = [r._asdict()["proposalId"] for r in proposals.all()] + conditions.append(models.Proposal.proposalId.in_(proposals if proposals else [])) + + query = query.filter(sqlalchemy.or_(*conditions)) + return query + + +def groups_from_beamlines(beamLines: list[str]) -> list[list]: + """Get uiGroups from a list of beamlines""" + db_options = get_options() + groups = [] + for beamline in beamLines: + for group in db_options.beamLineGroups: + for groupBeamline in group.beamLines: + if beamline == groupBeamline.beamLineName: + groups.append(group.uiGroup) + + return list(set(groups)) + + +def beamlines_from_group(beamLineGroup: str) -> list[str]: + """Get a list of beamlines from a groupName""" + db_options = get_options() + for group in db_options.beamLineGroups: + if group.groupName == beamLineGroup: + return [beamline.beamLineName for beamline in group.beamLines] + + return [] diff --git 
a/pyispyb/app/extensions/database/utils.py b/pyispyb/app/extensions/database/utils.py index 7a01aa05..1cb42d7a 100644 --- a/pyispyb/app/extensions/database/utils.py +++ b/pyispyb/app/extensions/database/utils.py @@ -1,3 +1,4 @@ +import enum import os import time import logging @@ -17,45 +18,46 @@ def order( query: "sqlalchemy.orm.Query[Any]", sort_map: dict[str, "sqlalchemy.Column[Any]"], - order: str, - default: Optional[list[str]] = None, - order_by: Optional[str] = None, + order: Optional[dict[str]], + default: Optional[dict[str]] = None, ) -> "sqlalchemy.orm.Query[Any]": - """Sort a result set by a field + """Sort a result set by a column Args: query (sqlalchemy.query): The current query sort_map (dict): A mapping of field(str) -> sqlalchemy.Column - - Kwargs: - order_by (str): Field to sort by - order (Order): Asc or desc + order (dict): { order_by: column, order: Asc or desc } Returns query (sqlalchemy.orm.Query): The ordered query """ - if not (order_by and order) and not default: + if not (order and order["order_by"] and order["order"]) and not default: return query + logger.info(f"Ordering by {order['order_by']} {order['order']}") + if default: - order_by = default[0] - order = default[1] + return query.order_by( + getattr(sort_map[default["order_by"]], default["order"])() + ) - if order_by not in sort_map: - logger.warning(f"Unknown order_by {order_by}") + if order["order_by"].value not in sort_map: + logger.warning(f"Unknown order_by {order['order_by']}") return query - return query.order_by(getattr(sort_map[order_by], str(order))()) + return query.order_by( + getattr(sort_map[order["order_by"].value], order["order"].value)() + ) def page( - query: "sqlalchemy.orm.Query[Any]", skip: int, limit: int + query: "sqlalchemy.orm.Query[Any]", *, skip: int, limit: int ) -> "sqlalchemy.orm.Query[Any]": """Paginate a `Query` Kwargs: - per_page (str): Number of rows per page - page (str): Page number to display + skip (str): Offset to start at + limit(str): Number 
of items to display Returns query (sqlalchemy.orm.Query): The paginated query @@ -101,6 +103,17 @@ def with_metadata( return parsed +def update_model(model: any, values: dict[str, any]): + """Update a model with new values including nested models""" + for key, value in values.items(): + if isinstance(value, dict): + update_model(getattr(model, key), value) + else: + if isinstance(value, enum.Enum): + value = value.value + setattr(model, key, value) + + ENABLE_DEBUG_LOGGING = False diff --git a/pyispyb/app/extensions/options/base.py b/pyispyb/app/extensions/options/base.py index 40e99883..30d28e94 100644 --- a/pyispyb/app/extensions/options/base.py +++ b/pyispyb/app/extensions/options/base.py @@ -10,7 +10,7 @@ from ...globals import g from ..database.middleware import db from ..database.session import get_session -from .schema import Options, UIOptions +from .schema import Options, UIOptions, BeamLineGroup logger = logging.getLogger(__file__) @@ -21,6 +21,17 @@ def setup_options(app: ASGIApp): with get_session() as session: app.db_options = get_options(get_all=True, session=session) + if not app.db_options.beamLineGroups: + logger.warning("`beamLineGroups` are not configured, setting default empty") + app.db_options.beamLineGroups = [ + BeamLineGroup( + groupName="Empty", + uiGroup="empty", + permission="bl_admin", + beamLines=[], + ) + ] + def get_options(get_all: bool = False, session=None) -> Options: if not session: diff --git a/pyispyb/app/extensions/options/schema.py b/pyispyb/app/extensions/options/schema.py index 0df79ce9..0d84c26e 100644 --- a/pyispyb/app/extensions/options/schema.py +++ b/pyispyb/app/extensions/options/schema.py @@ -1,12 +1,43 @@ +from typing import Optional + from pydantic import BaseModel, Field +class BeamLineGroupBeamLine(BaseModel): + beamLineName: str = Field(title="Beamline Name") + sampleChangerType: Optional[str] = Field( + None, title="Sample Changer Type", nullable=True + ) + sampleChangerCapacity: Optional[int] = Field( + None, 
+ title="Sample Changer Capacity", + description="If no specific type is available a capacity can be defined for the generic view", + nullable=True, + ) + archived: bool = Field( + False, + title="Archived", + description="Whether this beamline is archived (no longer displayed on landing page)", + ) + + +class BeamLineGroup(BaseModel): + groupName: str = Field(title="Group Name", description="A group of beamlines") + uiGroup: str = Field(title="UI Group", description="Display type to use in the UI") + permission: str = Field( + title="Permission", + description="Permission required to view all proposals from these beamlines", + ) + beamLines: list[BeamLineGroupBeamLine] = Field([], title="Beamlines") + + class UIOptions(BaseModel): """Publicly available UI options"""  motd: str = Field( "", title="Message of the Day", description="Displayed at the top of the UI" ) + beamLineGroups: list[BeamLineGroup] = Field([], title="Beamline Groups") class Options(UIOptions): diff --git a/pyispyb/app/routes/options.py b/pyispyb/app/routes/options.py index 2304b4e8..bac45078 100644 --- a/pyispyb/app/routes/options.py +++ b/pyispyb/app/routes/options.py @@ -1,4 +1,4 @@ -from fastapi import Depends, Request +from fastapi import Depends  from ...dependencies import permission from ..base import AuthenticatedAPIRouter @@ -26,15 +26,17 @@ def get_options(depends: bool = Depends(permission("manage_options"))) -> Option return crud.get_options(get_all=True) -@router.post( +@router.patch( "", response_model=Options, ) def update_options( - options: Options, request: Request, depends=Depends(permission("manage_options")) + options: Options, depends=Depends(permission("manage_options")) ) -> Options: """Update the database options""" + from pyispyb.app.main import app + crud.update_options(options) options = crud.get_options(get_all=True) - request.app.db_options = options + app.db_options = options return options diff --git a/pyispyb/config.py b/pyispyb/config.py index 0cb929d9..26a02262
100644 --- a/pyispyb/config.py +++ b/pyispyb/config.py @@ -67,6 +67,9 @@ class Settings(BaseSettings): simulation_config: str = None + # Map file paths in the database to a different root directory + path_map: str = None + class Config: env_file = get_env_file() diff --git a/pyispyb/core/modules/datacollections.py b/pyispyb/core/modules/datacollections.py new file mode 100644 index 00000000..addb775e --- /dev/null +++ b/pyispyb/core/modules/datacollections.py @@ -0,0 +1,263 @@ +import logging +import os +from typing import Optional + +from sqlalchemy import func +from ispyb import models + +from ...app.extensions.database.definitions import ( + with_authorization, +) +from ...app.extensions.database.utils import Paged, page, with_metadata +from ...app.extensions.database.middleware import db +from .events import get_events +from ..schemas import datacollections as schema +from ...config import settings + +logger = logging.getLogger(__name__) + + +def get_datacollection_diffraction_image_path( + dataCollectionId: int, + snapshot: bool = False, +) -> Optional[str]: + query = ( + db.session.query( + ( + models.Image.jpegThumbnailFileFullPath + if snapshot + else models.Image.jpegFileFullPath + ).label("imagePath") + ) + .filter(models.Image.imageNumber == 1) + .filter(models.Image.dataCollectionId == dataCollectionId) + .join(models.DataCollection) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + ) + + query = with_authorization(query, joinBLSession=False) + first_image = query.first() + + if first_image: + if not os.path.exists(first_image.imagePath): + logger.warning( + f"Diffraction image {first_image.imagePath} for dataCollectionId {dataCollectionId} does not exist on disk" + ) + return None + + return first_image.imagePath + + +def get_datacollection_snapshot_path( + dataCollectionId: int, + imageId: int = 1, + snapshot: bool = False, +) -> Optional[str]: + datacollections = get_events( + dataCollectionId=dataCollectionId, 
+ skip=0, + limit=1, + ) + try: + dc = datacollections.first["Item"] + except IndexError: + return None + + images = [ + "xtalSnapshotFullPath1", + "xtalSnapshotFullPath2", + "xtalSnapshotFullPath3", + "xtalSnapshotFullPath4", + ] + + image_path: str = getattr(dc, images[imageId - 1]) + if image_path is None: + return None + + if settings.path_map: + image_path = settings.path_map + image_path + + if snapshot: + ext = os.path.splitext(image_path)[1][1:].strip() + image_path_tmp = image_path.replace(f".{ext}", f"t.{ext}") + + # fallback in case snapshot doesn't exist + if os.path.exists(image_path_tmp): + image_path = image_path_tmp + + if not os.path.exists(image_path): + logger.warning( + f"{images[imageId - 1]} [{image_path}] for dataCollectionId {dataCollectionId} does not exist on disk" + ) + return None + + return image_path + + +def get_datacollection_analysis_image_path( + dataCollectionId: int, +) -> Optional[str]: + datacollections = get_events( + dataCollectionId=dataCollectionId, + skip=0, + limit=1, + ) + try: + dc = datacollections.first["Item"] + except IndexError: + return None + + image_path: str = dc.imageQualityIndicatorsPlotPath + if image_path is None: + return None + + if settings.path_map: + image_path = settings.path_map + image_path + + if not os.path.exists(image_path): + logger.warning( + f"imageQualityIndicatorsPlotPath [{dc.imageQualityIndicatorsPlotPath}] for dataCollectionId {dataCollectionId} does not exist on disk" + ) + return None + + return image_path + + +def get_datacollection_attachments( + skip: int, + limit: int, + dataCollectionId: Optional[int] = None, + dataCollectionGroupId: Optional[int] = None, + dataCollectionFileAttachmentId: Optional[int] = None, +) -> Paged[models.DataCollectionFileAttachment]: + metadata = { + "url": func.concat( + f"{settings.api_root}/datacollections/attachments/", + models.DataCollectionFileAttachment.dataCollectionFileAttachmentId, + ) + } + + query = ( +
db.session.query(models.DataCollectionFileAttachment, *metadata.values()) + .join(models.DataCollection) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + .group_by(models.DataCollectionFileAttachment.dataCollectionFileAttachmentId) + ) + + if dataCollectionId: + query = query.filter( + models.DataCollectionFileAttachment.dataCollectionId == dataCollectionId + ) + + if dataCollectionGroupId: + query = query.filter( + models.DataCollectionGroup.dataCollectionGroupId == dataCollectionGroupId + ) + + if dataCollectionFileAttachmentId: + query = query.filter( + models.DataCollectionFileAttachment.dataCollectionFileAttachmentId + == dataCollectionFileAttachmentId + ) + + query = with_authorization(query, joinBLSession=False) + + total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + for result in results: + result._metadata["fileName"] = os.path.basename(result.fileFullPath) + + return Paged(total=total, results=results, skip=skip, limit=limit) + + +def get_per_image_analysis( + skip: int, + limit: int, + dataCollectionId: Optional[int] = None, + dataCollectionGroupId: Optional[int] = None, +) -> Paged[schema.PerImageAnalysis]: + query = ( + db.session.query( + models.ImageQualityIndicators.imageNumber, + models.ImageQualityIndicators.totalIntegratedSignal, + models.ImageQualityIndicators.method2Res, + models.ImageQualityIndicators.goodBraggCandidates, + ) + .join( + models.DataCollection, + models.ImageQualityIndicators.dataCollectionId + == models.DataCollection.dataCollectionId, + ) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + ) + + if dataCollectionId: + query = query.filter(models.DataCollection.dataCollectionId == dataCollectionId) + + if dataCollectionGroupId: + query = query.filter( + models.DataCollectionGroup.dataCollectionGroupId == dataCollectionGroupId + ) + + query = with_authorization(query, 
joinBLSession=False) + query = page(query, skip=skip, limit=limit) + total = query.count() + + results = {"dataCollectionId": dataCollectionId} + for row in [r._asdict() for r in query.all()]: + for key in [ + "imageNumber", + "totalIntegratedSignal", + "method2Res", + "goodBraggCandidates", + ]: + if key not in results: + results[key] = [] + if row[key] is not None: + results[key].append(row[key]) + + return Paged(total=total, results=[results], skip=skip, limit=limit) + + +def get_workflow_steps( + skip: int, + limit: int, + workflowId: Optional[int] = None, + workflowStepId: Optional[int] = None, +) -> Paged[models.WorkflowStep]: + query = ( + db.session.query(models.WorkflowStep) + .join(models.Workflow) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + ) + + if workflowId: + query = query.filter(models.WorkflowStep.workflowId == workflowId) + + if workflowStepId: + query = query.filter(models.WorkflowStep.workflowStepId == workflowStepId) + + query = with_authorization(query, joinBLSession=False) + + total = query.count() + query = page(query, skip=skip, limit=limit) + results = query.all() + + for result in results: + result._metadata["attachments"] = {} + for file in ["imageResultFilePath", "resultFilePath", "htmlResultFilePath"]: + result._metadata["attachments"][file] = os.path.exists( + getattr(result, file) + ) + + return Paged(total=total, results=results, skip=skip, limit=limit) diff --git a/pyispyb/core/modules/events.py b/pyispyb/core/modules/events.py index b77614bd..1478775b 100644 --- a/pyispyb/core/modules/events.py +++ b/pyispyb/core/modules/events.py @@ -1,14 +1,64 @@ -from typing import Optional, Any +from dataclasses import dataclass, field +import enum +from typing import Any, List, Optional import os +from fastapi import HTTPException import sqlalchemy +from sqlalchemy import or_ from sqlalchemy.orm import contains_eager from sqlalchemy.sql.expression import literal_column from ispyb import models 
-from pyispyb.app.extensions.database.utils import Paged, page -from pyispyb.app.extensions.database.middleware import db +from ...app.extensions.database.definitions import ( + with_authorization, + _session, + _proposal, +) +from ...app.extensions.database.utils import Paged, page +from ...app.extensions.database.middleware import db from ..schemas import events as schema +from ...config import settings + + +@dataclass +class EntityType: + # The entity `DataCollection` or `EnergyScan` + entity: sqlalchemy.orm.decl_api.DeclarativeMeta + # How the entity joins to `BLSample` i.e. `DataCollection.blSampleId` + sampleId: "sqlalchemy.Column[Any]" + # Its primary key `dataCollectionId` + key: str + # Any joined entities i.e. `DataCollectionGroup`` + joined: Optional[List[sqlalchemy.orm.decl_api.DeclarativeMeta]] = field( + default_factory=list + ) + + +ENTITY_TYPES: dict[str, EntityType] = { + "dc": EntityType( + models.DataCollection, + models.DataCollectionGroup.blSampleId, + "dataCollectionId", + [ + models.DataCollection.DataCollectionGroup, + models.DataCollection.GridInfo, + [ + models.DataCollection.DataCollectionGroup, + models.DataCollectionGroup.Workflow, + ], + ], + ), + "robot": EntityType( + models.RobotAction, models.RobotAction.blsampleId, "robotActionId" + ), + "xrf": EntityType( + models.XFEFluorescenceSpectrum, + models.XFEFluorescenceSpectrum.blSampleId, + "xfeFluorescenceSpectrumId", + ), + "es": EntityType(models.EnergyScan, models.EnergyScan.blSampleId, "energyScanId"), +} def with_sample( @@ -36,97 +86,243 @@ def with_sample( return query +class EventStatus(str, enum.Enum): + success = "success" + failed = "failed" + processed = "processed" + processerror = "processerror" + + def get_events( skip: int, limit: int, + session: Optional[str] = None, sessionId: Optional[int] = None, + proposal: Optional[str] = None, + proposalId: Optional[int] = None, + beamLineName: Optional[str] = None, + dataCollectionId: Optional[int] = None, 
dataCollectionGroupId: Optional[int] = None, blSampleId: Optional[int] = None, proteinId: Optional[int] = None, + status: Optional[EventStatus] = None, + eventType: Optional[str] = None, ) -> Paged[schema.Event]: queries = {} - dataCollectionId = models.DataCollection.dataCollectionId + _dataCollectionId = models.DataCollection.dataCollectionId startTime = models.DataCollection.startTime endTime = models.DataCollection.endTime - duration = sqlalchemy.func.time_to_sec( - sqlalchemy.func.timediff( - models.DataCollection.endTime, + duration = ( + sqlalchemy.func.timestampdiff( + sqlalchemy.text("SECOND"), models.DataCollection.startTime, + models.DataCollection.endTime, ) + / 60 ) dataCollectionCount = literal_column("1") if dataCollectionGroupId is None: - duration = sqlalchemy.func.sum(duration) + duration = sqlalchemy.func.sum(duration) / ( + sqlalchemy.func.count(models.DataCollection.dataCollectionId) + / sqlalchemy.func.count( + sqlalchemy.distinct(models.DataCollection.dataCollectionId) + ) + ) # Return the first dataCollectionId in a group - dataCollectionId = sqlalchemy.func.min(models.DataCollection.dataCollectionId) # type: ignore - startTime = sqlalchemy.func.min(models.DataCollection.startTime) # type: ignore - endTime = sqlalchemy.func.max(models.DataCollection.endTime) # type: ignore + _dataCollectionId = sqlalchemy.func.max(models.DataCollection.dataCollectionId) + startTime = sqlalchemy.func.min(models.DataCollection.startTime) + endTime = sqlalchemy.func.max(models.DataCollection.endTime) dataCollectionCount = sqlalchemy.func.count( sqlalchemy.func.distinct(models.DataCollection.dataCollectionId) - ) # type: ignore + ) - queries["dc"] = db.session.query( - dataCollectionId.label("id"), - startTime.label("startTime"), - endTime.label("endTime"), - literal_column("'dc'").label("type"), - dataCollectionCount.label("count"), - ).join( - models.DataCollectionGroup, - models.DataCollectionGroup.dataCollectionGroupId - == 
models.DataCollection.dataCollectionGroupId, + queries["dc"] = ( + db.session.query( + _dataCollectionId.label("id"), + startTime.label("startTime"), + endTime.label("endTime"), + duration.label("duration"), + literal_column("'dc'").label("type"), + dataCollectionCount.label("count"), + sqlalchemy.func.count( + sqlalchemy.distinct( + models.DataCollectionFileAttachment.dataCollectionFileAttachmentId + ) + ).label("attachments"), + ) + .join( + models.DataCollectionGroup, + models.DataCollectionGroup.dataCollectionGroupId + == models.DataCollection.dataCollectionGroupId, + ) + .join( + models.BLSession, + models.BLSession.sessionId == models.DataCollectionGroup.sessionId, + ) + .join( + models.Proposal, models.Proposal.proposalId == models.BLSession.proposalId + ) + .outerjoin(models.DataCollectionFileAttachment) ) - queries["robot"] = db.session.query( - models.RobotAction.robotActionId.label("id"), - models.RobotAction.startTimestamp.label("startTime"), - models.RobotAction.endTimestamp.label("endTime"), - literal_column("'robot'").label("type"), - literal_column("1").label("count"), + + queries["robot"] = ( + db.session.query( + models.RobotAction.robotActionId.label("id"), + models.RobotAction.startTimestamp.label("startTime"), + models.RobotAction.endTimestamp.label("endTime"), + ( + sqlalchemy.func.timestampdiff( + sqlalchemy.text("SECOND"), + models.RobotAction.startTimestamp, + models.RobotAction.endTimestamp, + ) + / 60 + ).label("duration"), + literal_column("'robot'").label("type"), + literal_column("1").label("count"), + literal_column("0").label("attachments"), + ) + .join( + models.BLSession, + models.BLSession.sessionId == models.RobotAction.blsessionId, + ) + .join( + models.Proposal, models.Proposal.proposalId == models.BLSession.proposalId + ) + .group_by(models.RobotAction.robotActionId) ) - queries["xrf"] = db.session.query( - models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId.label("id"), - 
models.XFEFluorescenceSpectrum.startTime.label("startTime"), - models.XFEFluorescenceSpectrum.endTime.label("endTime"), - literal_column("'xrf'").label("type"), - literal_column("1").label("count"), + queries["xrf"] = ( + db.session.query( + models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId.label("id"), + models.XFEFluorescenceSpectrum.startTime.label("startTime"), + models.XFEFluorescenceSpectrum.endTime.label("endTime"), + ( + sqlalchemy.func.timestampdiff( + sqlalchemy.text("SECOND"), + models.XFEFluorescenceSpectrum.startTime, + models.XFEFluorescenceSpectrum.endTime, + ) + / 60 + ).label("duration"), + literal_column("'xrf'").label("type"), + literal_column("1").label("count"), + literal_column("0").label("attachments"), + ) + .join( + models.BLSession, + models.BLSession.sessionId == models.XFEFluorescenceSpectrum.sessionId, + ) + .join( + models.Proposal, models.Proposal.proposalId == models.BLSession.proposalId + ) + .group_by(models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId) ) - queries["es"] = db.session.query( - models.EnergyScan.energyScanId.label("id"), - models.EnergyScan.startTime.label("startTime"), - models.EnergyScan.endTime.label("endTime"), - literal_column("'es'").label("type"), - literal_column("1").label("count"), + queries["es"] = ( + db.session.query( + models.EnergyScan.energyScanId.label("id"), + models.EnergyScan.startTime.label("startTime"), + models.EnergyScan.endTime.label("endTime"), + ( + sqlalchemy.func.timestampdiff( + sqlalchemy.text("SECOND"), + models.EnergyScan.startTime, + models.EnergyScan.endTime, + ) + / 60 + ).label("duration"), + literal_column("'es'").label("type"), + literal_column("1").label("count"), + literal_column("0").label("attachments"), + ) + .join( + models.BLSession, models.BLSession.sessionId == models.EnergyScan.sessionId + ) + .join( + models.Proposal, models.Proposal.proposalId == models.BLSession.proposalId + ) + .group_by(models.EnergyScan.energyScanId) ) + if session: + session_row = ( 
+ db.session.query(models.BLSession) + .join(models.Proposal) + .filter(models.BLSession.session == session) + .first() + ) + + if proposal: + proposal_row = ( + db.session.query(models.Proposal) + .filter(models.Proposal.proposal == proposal) + .first() + ) + # Join sample information - _mapper = { - "dc": models.DataCollectionGroup.blSampleId, - "robot": models.RobotAction.blsampleId, - "xrf": models.XFEFluorescenceSpectrum.blSampleId, - "es": models.EnergyScan.blSampleId, - } - for key, _query in queries.items(): - queries[key] = with_sample(_query, _mapper[key], blSampleId, proteinId) - - # Filter by sessionid - if sessionId: + for key in queries.keys(): + # Add proposal, session + queries[key] = queries[key].add_columns( + _proposal, _session, models.BLSession.sessionId.label("sessionId") + ) + + # Add sample + queries[key] = with_sample( + queries[key], ENTITY_TYPES[key].sampleId, blSampleId, proteinId + ) + + # Apply permissions + queries[key] = with_authorization(queries[key], joinBLSession=False) + + # Filter by session + if session: + if session_row: + queries[key] = queries[key].filter( + models.BLSession.sessionId == session_row.sessionId + ) + + if sessionId: + queries[key] = queries[key].filter(models.BLSession.sessionId == sessionId) + + # Filter by proposal + if proposal: + if proposal_row: + queries[key] = queries[key].filter( + models.Proposal.proposalId == proposal_row.proposalId + ) + + if proposalId: + queries[key] = queries[key].filter(models.Proposal.proposalId == proposalId) + + # Filter by beamLineName + if beamLineName: + queries[key] = queries[key].filter( + models.BLSession.beamLineName == beamLineName + ) + + # Filter a single dataCollection + if dataCollectionId: queries["dc"] = queries["dc"].filter( - models.DataCollectionGroup.sessionId == sessionId + models.DataCollection.dataCollectionId == dataCollectionId ) queries["robot"] = queries["robot"].filter( - models.RobotAction.blsessionId == sessionId + models.RobotAction.robotActionId
== 0 ) queries["xrf"] = queries["xrf"].filter( - models.XFEFluorescenceSpectrum.sessionId == sessionId + models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId == 0 ) - queries["es"] = queries["es"].filter(models.EnergyScan.sessionId == sessionId) + queries["es"] = queries["es"].filter(models.EnergyScan.energyScanId == 0) # Ungroup a dataCollectionGroup if dataCollectionGroupId: - queries["dc"] = queries["dc"].filter( - models.DataCollectionGroup.dataCollectionGroupId == dataCollectionGroupId + queries["dc"] = ( + queries["dc"] + .filter( + models.DataCollectionGroup.dataCollectionGroupId + == dataCollectionGroupId + ) + .group_by(models.DataCollection.dataCollectionId) ) queries["robot"] = queries["robot"].filter( models.RobotAction.robotActionId == 0 @@ -140,73 +336,152 @@ def get_events( models.DataCollectionGroup.dataCollectionGroupId ) - query: sqlalchemy.orm.Query[Any] = queries["dc"].union_all( + # Filter by status + if status: + if status == EventStatus.success: + queries["dc"] = queries["dc"].filter( + models.DataCollection.runStatus.like("%success%") + ) + queries["robot"] = queries["robot"].filter( + models.RobotAction.status.like("%success%") + ) + elif status == EventStatus.failed: + queries["dc"] = queries["dc"].filter( + models.DataCollection.runStatus.notlike("%success%") + ) + queries["robot"] = queries["robot"].filter( + models.RobotAction.status.notlike("%success%") + ) + elif status == EventStatus.processed: + queries["dc"] = ( + queries["dc"] + .join(models.AutoProcIntegration) + .join(models.AutoProcProgram) + .filter(models.AutoProcProgram.processingStatus == 1) + ) + queries["robot"] = queries["robot"].filter( + models.RobotAction.robotActionId == 0 + ) + elif status == EventStatus.processerror: + if not hasattr(models, "AutoProcProgramMessage"): + raise HTTPException( + status_code=500, + detail="Database does not have `AutoProcProgramMessage`", + ) + queries["dc"] = ( + queries["dc"] + .join(models.AutoProcIntegration) + # 
.outerjoin(models.ProcessingJob) + .join( + models.AutoProcProgram, + or_( + # models.ProcessingJob.processingJobId + # == models.AutoProcProgram.processingJobId, + models.AutoProcIntegration.autoProcProgramId + == models.AutoProcProgram.autoProcProgramId, + ), + ) + .join(models.AutoProcProgramMessage) + .filter( + or_( + models.AutoProcProgramMessage.severity == "WARNING", + models.AutoProcProgramMessage.severity == "ERROR", + ) + ) + ) + queries["robot"] = queries["robot"].filter( + models.RobotAction.robotActionId == 0 + ) + + queries["xrf"] = queries["xrf"].filter( + models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId == 0 + ) + queries["es"] = queries["es"].filter(models.EnergyScan.energyScanId == 0) + + # Filter by eventType + if eventType: + filters = { + "dc": queries["dc"].filter(models.DataCollection.dataCollectionId == 0), + "robot": queries["robot"].filter(models.RobotAction.robotActionId == 0), + "xrf": queries["xrf"].filter( + models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId == 0 + ), + "es": queries["es"].filter(models.EnergyScan.energyScanId == 0), + } + + tableFilters = ["robot", "es", "xrf"] + if eventType in tableFilters: + for query in tableFilters: + if eventType == query: + filters[query] = queries[query] + else: + filters["dc"] = queries["dc"].filter( + models.DataCollectionGroup.experimentType == eventType + ) + + for key, query_filter in filters.items(): + queries[key] = query_filter + + # Now union the four queries + query: sqlalchemy.orm.Query[Any] = queries["dc"].union( queries["robot"], queries["xrf"], queries["es"] ) total = query.count() - query = query.order_by(sqlalchemy.desc("startTime")) + query = query.order_by(sqlalchemy.desc("endTime")) query = page(query, skip=skip, limit=limit) + # Results contains an index of type / id results = query.all() results = [r._asdict() for r in results] - ids: dict[str, list[int]] = {} - types: dict[str, list[Any]] = { - "dc": [ - models.DataCollection, - "dataCollectionId", - 
models.DataCollection.DataCollectionGroup, - ], - "robot": [models.RobotAction, "robotActionId"], - "xrf": [models.XFEFluorescenceSpectrum, "xfeFluorescenceSpectrumId"], - "es": [models.EnergyScan, "energyScanId"], - } + # Build a list of ids to load based on type, i.e. a list of `dataCollectionId`s + entity_ids: dict[str, list[int]] = {} for result in results: - for name in types.keys(): + for name in ENTITY_TYPES.keys(): if result["type"] == name: - if name not in ids: - ids[name] = [] - ids[name].append(result["id"]) - - type_map = {} - for name, ty in types.items(): - if name in ids: - column = getattr(ty[0], ty[1]) - if len(ty) > 2: - items = ( - db.session.query(ty[0]) - .join(ty[2]) - .options(contains_eager(ty[2])) - .filter(column.in_(ids[name])) - .all() - ) - else: - items = db.session.query(ty[0]).filter(column.in_(ids[name])).all() - type_map[name] = {getattr(item, ty[1]): item for item in items} + if name not in entity_ids: + entity_ids[name] = [] + entity_ids[name].append(result["id"]) + + # Now load the related entities, i.e. 
load the `DataCollection` or `EnergyScan` + entity_type_map = {} + for name, entity_type in ENTITY_TYPES.items(): + if name in entity_ids: + column = getattr(entity_type.entity, entity_type.key) + query = db.session.query(entity_type.entity).filter( + column.in_(entity_ids[name]) + ) + # If there are joined entities load those too + if entity_type.joined: + for joined_entity in entity_type.joined: + if isinstance(joined_entity, list): + query = query.outerjoin(joined_entity[-1]).options( + contains_eager(*joined_entity) + ) + else: + query = query.outerjoin(joined_entity).options( + contains_eager(joined_entity) + ) + + entity_type_map[name] = { + getattr(entity, entity_type.key): entity for entity in query.all() + } + + # Merge the loaded entities back into the index's `Item` for result in results: - for name, ty in types.items(): - if result["type"] == name: - if name in type_map: - result["Item"] = type_map[name][result["id"]] + for entity_type_name in ENTITY_TYPES.keys(): + if result["type"] == entity_type_name: + if entity_type_name in entity_type_map: + result["Item"] = entity_type_map[entity_type_name][result["id"]] - if name == "dc": + if entity_type_name == "dc": _check_snapshots(result["Item"]) return Paged(total=total, results=results, skip=skip, limit=limit) -def get_datacollection(dataCollectionId: int) -> Optional[models.DataCollection]: - dc = ( - db.session.query(models.DataCollection) - .filter(models.DataCollection.dataCollectionId == dataCollectionId) - .first() - ) - - return dc - - def _check_snapshots(datacollection: models.DataCollection) -> models.DataCollection: snapshot_statuses = {} for i, snapshot in enumerate( @@ -219,7 +494,8 @@ def _check_snapshots(datacollection: models.DataCollection) -> models.DataCollec ): snapshot_path = getattr(datacollection, snapshot) if snapshot_path: - # snapshot_path = snapshot_path.replace("/data", "/Users/Shared/data") + if settings.path_map: + snapshot_path = settings.path_map + snapshot_path 
snapshot_statuses[i + 1] = ( os.path.exists(snapshot_path) if snapshot_path is not None else False ) @@ -227,32 +503,122 @@ def _check_snapshots(datacollection: models.DataCollection) -> models.DataCollec snapshot_statuses[i + 1] = False datacollection._metadata["snapshots"] = snapshot_statuses + + analysis = False + if hasattr(datacollection, "imageQualityIndicatorsPlotPath"): + if datacollection.imageQualityIndicatorsPlotPath: + analysis = os.path.exists(datacollection.imageQualityIndicatorsPlotPath) + datacollection._metadata["snapshots"]["analysis"] = analysis + + # diffraction_row: models.Image = ( + # db.session.query(models.Image) + # .filter(models.Image.imageNumber == 1) + # .filter(models.Image.dataCollectionId == datacollection.dataCollectionId) + # .first() + # ) + + # diffraction = False + # if diffraction_row: + # if diffraction_row.jpegThumbnailFileFullPath: + # diffraction = os.path.exists(diffraction_row.jpegThumbnailFileFullPath) + # datacollection._metadata["snapshots"]["diffraction"] = diffraction + return datacollection -def get_datacollection_snapshot_path( - dataCollectionId: int, imageId: int = 1, snapshot: bool = False -) -> Optional[str]: - dc = get_datacollection(dataCollectionId) - if not dc: - return None +def get_event_types( + session: Optional[str] = None, + sessionId: Optional[int] = None, + blSampleId: Optional[int] = None, + proteinId: Optional[int] = None, +) -> Paged[schema.EventType]: + queries = {} + queries["dc"] = db.session.query( + sqlalchemy.distinct(models.DataCollectionGroup.experimentType).label( + "experimentType" + ), + ).join( + models.BLSession, + models.BLSession.sessionId == models.DataCollectionGroup.sessionId, + ) - images = [ - "xtalSnapshotFullPath1", - "xtalSnapshotFullPath2", - "xtalSnapshotFullPath3", - "xtalSnapshotFullPath4", - ] + queries["robot"] = db.session.query( + sqlalchemy.func.count(models.RobotAction.robotActionId).label("count") + ).join( + models.BLSession, + models.BLSession.sessionId == 
models.RobotAction.blsessionId, + ) + queries["xrf"] = db.session.query( + sqlalchemy.func.count( + models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId + ).label("count") + ).join( + models.BLSession, + models.BLSession.sessionId == models.XFEFluorescenceSpectrum.sessionId, + ) + queries["es"] = db.session.query( + sqlalchemy.func.count(models.EnergyScan.energyScanId).label("count") + ).join(models.BLSession, models.BLSession.sessionId == models.EnergyScan.sessionId) + + if session: + session_row = ( + db.session.query(models.BLSession) + .join(models.Proposal) + .filter(models.BLSession.session == session) + .first() + ) + + for key in queries.keys(): + queries[key] = queries[key].join( + models.Proposal, + models.Proposal.proposalId == models.BLSession.proposalId, + ) + + if session: + if session_row: + queries[key] = queries[key].filter( + models.BLSession.sessionId == session_row.sessionId + ) + + if sessionId: + queries[key] = queries[key].filter(models.BLSession.sessionId == sessionId) - image_path: str = getattr(dc, images[imageId - 1]) - if image_path is None: - return None + if blSampleId: + queries[key] = queries[key].filter( + models.DataCollectionGroup.blSampleId == blSampleId + ) + + if proteinId: + queries[key] = ( + queries[key] + .join(models.BLSample) + .join(models.Crystal) + .join(models.Protein) + .filter(models.Protein.proteinId == proteinId) + ) - if snapshot: - ext = os.path.splitext(image_path)[1][1:].strip() - image_path = image_path.replace(f".{ext}", f"t.{ext}") + queries[key] = with_authorization(queries[key], joinBLSession=False) - if os.path.exists(image_path): - return image_path + queries[key] = [result._asdict() for result in queries[key].all()] - return None + eventTypes = [] + for eventType in queries["dc"]: + if eventType["experimentType"]: + eventTypes.append( + { + "eventType": eventType["experimentType"], + "eventTypeName": eventType["experimentType"], + } + ) + + for table, name in { + "robot": "Sample Actions", + "xrf": 
"XRF Spectrum", + "es": "Energy Scan", + }.items(): + if queries[table][0]["count"] > 0: + eventTypes.append({"eventType": table, "eventTypeName": name}) + + return Paged( + total=len(eventTypes), results=eventTypes, skip=0, limit=len(eventTypes) + ) diff --git a/pyispyb/core/modules/labcontacts.py b/pyispyb/core/modules/labcontacts.py index 3baf84f0..34475b8b 100644 --- a/pyispyb/core/modules/labcontacts.py +++ b/pyispyb/core/modules/labcontacts.py @@ -3,29 +3,43 @@ from sqlalchemy.orm import joinedload from ispyb import models -from pyispyb.app.extensions.database.utils import Paged, page -from pyispyb.app.extensions.database.middleware import db +from ...app.extensions.database.definitions import with_authorization +from ...app.extensions.database.utils import Paged, page, update_model +from ...app.extensions.database.middleware import db from ..schemas import labcontacts as schema +from .proposals import get_proposals def get_labcontacts( skip: int, limit: int, labContactId: Optional[int] = None, + proposal: str = None, proposalId: Optional[int] = None, + withAuthorization: bool = True, ) -> Paged[models.LabContact]: query = ( db.session.query(models.LabContact) .options(joinedload(models.LabContact.Person)) - .options(joinedload(models.LabContact.Person, models.Person.Laboratory)) # type: ignore + .options(joinedload(models.LabContact.Person, models.Person.Laboratory)) + .join( + models.Proposal, models.Proposal.proposalId == models.LabContact.proposalId + ) + .group_by(models.LabContact.labContactId) ) if labContactId: query = query.filter(models.LabContact.labContactId == labContactId) + if proposal: + query = query.filter(models.Proposal.proposal == proposal) + if proposalId: query = query.filter(models.LabContact.proposalId == proposalId) + if withAuthorization: + query = with_authorization(query) + total = query.count() query = page(query, skip=skip, limit=limit) @@ -33,11 +47,13 @@ def get_labcontacts( def create_labcontact(labcontact: 
schema.LabContactCreate) -> models.LabContact: - labcontact_dict = labcontact.dict() person_dict = labcontact_dict.pop("Person") laboratory_dict = person_dict.pop("Laboratory") + proposals = get_proposals(proposalId=labcontact.proposalId, skip=0, limit=1) + proposals.first + laboratory = models.Laboratory(**laboratory_dict) db.session.add(laboratory) db.session.commit() @@ -54,3 +70,15 @@ def create_labcontact(labcontact: schema.LabContactCreate) -> models.LabContact: labContactId=int(contact.labContactId), skip=0, limit=1 ) return new_labcontact.first + + +def update_labcontact( + labContactId: int, labContact: schema.LabContactCreate +) -> models.LabContact: + labcontact_dict = labContact.dict(exclude_unset=True) + labconcat = get_labcontacts(labContactId=labContactId, skip=0, limit=1).first + + update_model(labconcat, labcontact_dict) + db.session.commit() + + return get_labcontacts(labContactId=labContactId, skip=0, limit=1).first diff --git a/pyispyb/core/modules/proposals.py b/pyispyb/core/modules/proposals.py index 89dd49e2..0d486fbe 100644 --- a/pyispyb/core/modules/proposals.py +++ b/pyispyb/core/modules/proposals.py @@ -1,8 +1,15 @@ from typing import Optional -from sqlalchemy.orm import joinedload, contains_eager + +from sqlalchemy import or_, func, distinct +from sqlalchemy.orm import joinedload from ispyb import models -from pyispyb.app.extensions.database.utils import Paged, page -from pyispyb.app.extensions.database.middleware import db + +from ...app.extensions.database.utils import Paged, page, with_metadata +from ...app.extensions.database.middleware import db +from ...app.extensions.database.definitions import ( + groups_from_beamlines, + with_authorization, +) def get_proposals( @@ -11,39 +18,63 @@ def get_proposals( proposalId: Optional[int] = None, proposalCode: Optional[str] = None, proposalNumber: Optional[str] = None, - proposalHasPerson: Optional[bool] = False, + proposal: Optional[str] = None, + search: Optional[str] = None, + 
withAuthorization: bool = True, ) -> Paged[models.Proposal]: + metadata = { + "persons": func.count(distinct(models.ProposalHasPerson.personId)), + "sessions": func.count(distinct(models.BLSession.sessionId)), + "beamLines": func.group_concat(distinct(models.BLSession.beamLineName)), + } - query = db.session.query(models.Proposal).options( - joinedload(models.Proposal.Person) + query = ( + db.session.query(models.Proposal, *metadata.values()) + .options(joinedload(models.Proposal.Person)) + .outerjoin(models.BLSession) + .outerjoin(models.ProposalHasPerson) + .order_by(models.Proposal.proposalId.desc()) + .group_by(models.Proposal.proposalId) ) if proposalId: query = query.filter(models.Proposal.proposalId == proposalId) + if proposal: + query = query.filter(models.Proposal.proposal == proposal) + if proposalCode and proposalNumber: query = query.filter(models.Proposal.proposalCode == proposalCode) query = query.filter(models.Proposal.proposalNumber == proposalNumber) - if proposalHasPerson: - query = ( - query.outerjoin( - models.ProposalHasPerson, - models.Proposal.proposalId == models.ProposalHasPerson.proposalId, + if search: + query = query.filter( + or_( + models.Proposal.title.like(f"%{search}%"), + models.BLSession.beamLineName.like(search), + models.Proposal.proposal.like(f"%{search}%"), ) - .options(contains_eager("ProposalHasPerson")) - .outerjoin( - models.Person, - models.ProposalHasPerson.personId == models.Person.personId, - ) - .options(contains_eager("ProposalHasPerson.Person")) - .distinct() ) + if withAuthorization: + query = with_authorization(query, joinBLSession=False) + total = query.count() query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) - return Paged(total=total, results=query.all(), skip=skip, limit=limit) + for result in results: + result._metadata["beamLines"] = ( + result._metadata["beamLines"].split(",") + if result._metadata["beamLines"] + else [] + ) + + 
result._metadata["uiGroups"] = groups_from_beamlines( + result._metadata["beamLines"] + ) + + return Paged(total=total, results=results, skip=skip, limit=limit) def get_proposalHasPerson( diff --git a/pyispyb/core/modules/proteins.py b/pyispyb/core/modules/proteins.py index f4d89d02..8ba66c76 100644 --- a/pyispyb/core/modules/proteins.py +++ b/pyispyb/core/modules/proteins.py @@ -1,9 +1,21 @@ from typing import Optional -from sqlalchemy.orm import joinedload + +from sqlalchemy import or_, func, distinct +from sqlalchemy.orm import joinedload, contains_eager from ispyb import models -from pyispyb.app.extensions.database.utils import Paged, page -from pyispyb.app.extensions.database.middleware import db -from pyispyb.core.modules.utils import encode_external_id + + +from ...app.extensions.database.utils import Paged, page, with_metadata, order +from ...app.extensions.database.middleware import db +from ...app.extensions.database.definitions import with_authorization +from ...core.modules.utils import encode_external_id + + +ORDER_BY_MAP = { + "proteinId": models.Protein.proteinId, + "acronym": models.Protein.acronym, + "name": models.Protein.name, +} def get_proteins( @@ -11,14 +23,41 @@ def get_proteins( limit: int, proteinId: Optional[int] = None, proposalId: Optional[int] = None, + proposal: Optional[str] = None, externalId: Optional[int] = None, name: Optional[str] = None, acronym: Optional[str] = None, + search: Optional[str] = None, + sort_order: Optional[dict[str, str]] = None, + withAuthorization: bool = True, ) -> Paged[models.Protein]: - query = db.session.query(models.Protein).options( - joinedload(models.Protein.Proposal) + metadata = { + "pdbs": func.count(distinct(models.ProteinHasPDB.proteinid)), + "samples": func.count(distinct(models.BLSample.blSampleId)), + "crystals": func.count(distinct(models.Crystal.crystalId)), + } + + query = ( + db.session.query(models.Protein, *metadata.values()) + .options(joinedload(models.Protein.Proposal)) + 
.join(models.Proposal) + # .outerjoin( + # models.ConcentrationType, + # models.ConcentrationType.concentrationTypeId + # == models.Protein.concentrationTypeId, + # ) + # .options(contains_eager(models.Protein.ConcentrationType)) + .outerjoin(models.ComponentType) + .options(contains_eager(models.Protein.ComponentType)) + .outerjoin(models.ProteinHasPDB) + .outerjoin(models.Crystal) + .outerjoin(models.BLSample) + .group_by(models.Protein.proteinId) ) + if withAuthorization: + query = with_authorization(query) + if proteinId: query = query.filter(models.Protein.proteinId == proteinId) @@ -31,11 +70,54 @@ def get_proteins( if proposalId: query = query.filter(models.Protein.proposalId == proposalId) + if proposal: + query = query.filter(models.Proposal.proposal == proposal) + if externalId: externalId = encode_external_id(externalId) query = query.filter(models.Protein.externalId == externalId) + if search: + query = query.filter( + or_( + models.Protein.name.like(f"%{search}%"), + models.Protein.acronym.like(f"%{search}%"), + ) + ) + + if sort_order: + query = order(query, ORDER_BY_MAP, sort_order) + total = query.count() query = page(query, skip=skip, limit=limit) - return Paged(total=total, results=query.all(), skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + protein_ids = [result.proteinId for result in results] + dc_query = ( + db.session.query( + models.Protein.proteinId, + func.count(distinct(models.DataCollection.dataCollectionId)).label( + "datacollections" + ), + ) + .join(models.Crystal) + .join(models.BLSample) + .join( + models.DataCollectionGroup, + models.BLSample.blSampleId == models.DataCollectionGroup.blSampleId, + ) + .join(models.DataCollection) + .filter(models.Protein.proteinId.in_(protein_ids)) + .group_by(models.Protein.proteinId) + ) + + dc_counts = {} + for dc in dc_query.all(): + row = dc._asdict() + dc_counts[row["proteinId"]] = row["datacollections"] + + for result in results: + 
result._metadata["datacollections"] = dc_counts.get(result.proteinId, 0) + + return Paged(total=total, results=results, skip=skip, limit=limit) diff --git a/pyispyb/core/modules/samples.py b/pyispyb/core/modules/samples.py index 94dd0df5..55b51458 100644 --- a/pyispyb/core/modules/samples.py +++ b/pyispyb/core/modules/samples.py @@ -1,24 +1,65 @@ +import enum from typing import Optional -from sqlalchemy.orm import contains_eager -from sqlalchemy.sql.expression import func, distinct +from sqlalchemy.orm import contains_eager, aliased, joinedload +from sqlalchemy.sql.expression import func, distinct, and_, literal_column from ispyb import models -from pyispyb.app.extensions.database.definitions import with_auth_to_session -from pyispyb.app.extensions.database.middleware import db -from pyispyb.app.extensions.database.utils import Paged, page, with_metadata +from ...config import settings +from ...app.extensions.database.definitions import with_authorization +from ...app.extensions.database.middleware import db +from ...app.extensions.database.utils import Paged, page, with_metadata, order + + +SAMPLE_ORDER_BY_MAP = { + "blSampleId": models.BLSample.blSampleId, + "name": models.BLSample.name, + "location": models.BLSample.location, + "datacollections": func.count(distinct(models.DataCollection.dataCollectionId)), +} + + +SAMPLE_STATUS_FILTERS = { + "Sample Action": func.count(models.RobotAction.robotActionId), + "Data Collected": func.count(models.DataCollection.dataCollectionId), + "Strategy": func.count(models.Screening.screeningId), + "Auto Integrated": func.count(models.AutoProcIntegration.autoProcIntegrationId), +} + +if hasattr(models, "ProcessingJob"): + SAMPLE_STATUS_FILTERS["Processed"] = func.count( + models.ProcessingJob.processingJobId + ) + +SAMPLE_STATUS_ENUM = enum.Enum( + "SampleStatus", {k: k for k in SAMPLE_STATUS_FILTERS.keys()} +) def get_samples( skip: int, limit: int, + search: Optional[str] = None, blSampleId: Optional[int] = None, proteinId: 
Optional[int] = None, - admin: Optional[bool] = False, + proposal: Optional[str] = None, + containerId: Optional[int] = None, + beamLineName: Optional[str] = None, + sort_order: Optional[dict[str, str]] = None, + status: Optional[SAMPLE_STATUS_ENUM] = None, ) -> Paged[models.BLSample]: metadata = { "subsamples": func.count(distinct(models.BLSubSample.blSubSampleId)), "datacollections": func.count(distinct(models.DataCollection.dataCollectionId)), + "types": func.group_concat(distinct(models.DataCollectionGroup.experimentType)), + "strategies": func.count(distinct(models.ScreeningOutput.screeningOutputId)), + "autoIntegrations": func.count( + distinct(models.AutoProcIntegration.autoProcIntegrationId) + ), + "integratedResolution": func.min( + models.AutoProcScalingStatistics.resolutionLimitHigh + ), + "proposal": models.Proposal.proposal, } query = ( @@ -36,7 +77,9 @@ def get_samples( ) .join(models.Crystal.Protein) .options( - contains_eager("Crystal.Protein").load_only("name", "acronym"), + contains_eager(models.BLSample.Crystal, models.Crystal.Protein).load_only( + "name", "acronym" + ), ) .outerjoin( models.BLSubSample, @@ -51,21 +94,241 @@ def get_samples( models.DataCollectionGroup.dataCollectionGroupId == models.DataCollection.dataCollectionGroupId, ) + .outerjoin(models.Screening) + .outerjoin( + models.ScreeningOutput, + and_( + models.Screening.screeningId == models.ScreeningOutput.screeningId, + models.ScreeningOutput.strategySuccess == 1, + ), + ) + .outerjoin(models.AutoProcIntegration) + .outerjoin(models.AutoProcScalingHasInt) + .outerjoin( + models.AutoProcScalingStatistics, + models.AutoProcScalingHasInt.autoProcScalingId + == models.AutoProcScalingStatistics.autoProcScalingId, + ) + .join( + models.Container, + models.BLSample.containerId == models.Container.containerId, + ) + .options( + contains_eager(models.BLSample.Container).load_only( + models.Container.code, + ) + ) + .join(models.Dewar, models.Container.dewarId == models.Dewar.dewarId) + 
.options( + contains_eager( + models.BLSample.Container, + models.Container.Dewar, + ).load_only( + models.Dewar.code, + ) + ) + .join(models.Shipping, models.Dewar.shippingId == models.Shipping.shippingId) + .options( + contains_eager( + models.BLSample.Container, models.Container.Dewar, models.Dewar.Shipping + ).load_only( + models.Shipping.shippingName, + ) + ) + .join(models.Proposal, models.Proposal.proposalId == models.Shipping.proposalId) .group_by(models.BLSample.blSampleId) ) - if not admin: - query = ( - query.join( - models.Container, - models.BLSample.containerId == models.Container.containerId, + if hasattr(models.ContainerQueueSample, "dataCollectionPlanId") and hasattr( + models.ContainerQueueSample, "blSampleId" + ): + query = query.outerjoin( + models.ContainerQueueSample, + models.BLSample.blSampleId == models.ContainerQueueSample.blSampleId, + ) + DataCollectionQueued: models.DataCollection = aliased(models.DataCollection) + query = query.outerjoin( + DataCollectionQueued, + models.ContainerQueueSample.dataCollectionPlanId + == DataCollectionQueued.dataCollectionPlanId, + ) + metadata["queued"] = func.IF( + func.count(models.ContainerQueueSample.containerQueueSampleId) + > func.count(DataCollectionQueued.dataCollectionId), + True, + False, + ) + + query = query.add_columns(metadata["queued"]) + else: + metadata["queued"] = literal_column("0") + query = query.add_columns(metadata["queued"]) + + if search: + query = query.filter( + models.BLSample.name.like(f"%{search}%"), + ) + + query = with_authorization(query) + + if blSampleId: + query = query.filter(models.BLSample.blSampleId == blSampleId) + + if proteinId: + query = query.filter(models.Protein.proteinId == proteinId) + + if containerId: + query = query.filter(models.Container.containerId == containerId) + + if proposal: + proposal_row = ( + db.session.query(models.Proposal) + .filter(models.Proposal.proposal == proposal) + .first() + ) + if proposal_row: + query = 
query.filter(models.Proposal.proposalId == proposal_row.proposalId) + + if beamLineName: + query = query.filter( + and_( + models.Dewar.dewarStatus == "processing", + models.Container.beamlineLocation == beamLineName, + models.Container.sampleChangerLocation != "", ) - .join(models.Dewar, models.Container.dewarId == models.Dewar.dewarId) - .join( - models.Shipping, models.Dewar.shippingId == models.Shipping.shippingId + ) + + if status: + if hasattr(models, "ProcessingJob"): + if status == SAMPLE_STATUS_ENUM.Processed: + query = query.join(models.ProcessingJob) + + if status.value == "Sample Action": + query = query.join( + models.RobotAction, + models.RobotAction.blsampleId == models.BLSample.blSampleId, ) + + query = query.having(SAMPLE_STATUS_FILTERS[status.value] > 0) + + if sort_order: + query = order( + query, + SAMPLE_ORDER_BY_MAP, + sort_order, + {"order_by": "blSampleId", "order": "desc"}, ) - query = with_auth_to_session(query, models.Shipping.proposalId) + + total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + for result in results: + if result._metadata["types"]: + result._metadata["types"] = result._metadata["types"].split(",") + + return Paged(total=total, results=results, skip=skip, limit=limit) + + +SUBSAMPLE_ORDER_BY_MAP = { + "blSubSampleId": models.BLSubSample.blSubSampleId, + "datacollections": func.count(distinct(models.DataCollection.dataCollectionId)), +} + +if hasattr(models.BLSubSample, "type"): + SUBSAMPLE_ORDER_BY_MAP["type"] = models.BLSubSample.type + + +def get_subsamples( + skip: int, + limit: int, + blSubSampleId: Optional[int] = None, + blSampleId: Optional[int] = None, + proteinId: Optional[int] = None, + proposal: Optional[str] = None, + containerId: Optional[int] = None, + sort_order: Optional[dict[str, str]] = None, +) -> Paged[models.BLSubSample]: + metadata = { + "datacollections": func.count(distinct(models.DataCollection.dataCollectionId)), + 
"types": func.group_concat(distinct(models.DataCollectionGroup.experimentType)), + } + + query = ( + db.session.query(models.BLSubSample, *metadata.values()) + .join(models.BLSubSample.BLSample) + .join(models.BLSample.Crystal) + .options( + contains_eager(models.BLSubSample.BLSample).load_only( + models.BLSample.name, + ) + ) + .join(models.Crystal.Protein) + .options( + contains_eager( + models.BLSubSample.BLSample, + models.BLSample.Crystal, + models.Crystal.Protein, + ).load_only("name", "acronym"), + ) + .outerjoin( + models.DataCollectionGroup, + models.DataCollectionGroup.blSampleId == models.BLSample.blSampleId, + ) + .outerjoin( + models.DataCollection, + models.DataCollectionGroup.dataCollectionGroupId + == models.DataCollection.dataCollectionGroupId, + ) + .options( + joinedload(models.BLSubSample.Position1).load_only( + models.Position.posX, + models.Position.posY, + ) + ) + .options( + joinedload(models.BLSubSample.Position2).load_only( + models.Position.posX, + models.Position.posY, + ) + ) + .join( + models.Container, + models.BLSample.containerId == models.Container.containerId, + ) + .join(models.Dewar, models.Container.dewarId == models.Dewar.dewarId) + .join(models.Shipping, models.Dewar.shippingId == models.Shipping.shippingId) + .join(models.Proposal, models.Proposal.proposalId == models.Protein.proposalId) + .group_by(models.BLSubSample.blSubSampleId) + ) + + if hasattr(models.ContainerQueueSample, "dataCollectionPlanId"): + query = query.outerjoin( + models.ContainerQueueSample, + models.BLSubSample.blSubSampleId + == models.ContainerQueueSample.blSubSampleId, + ) + DataCollectionQueued: models.DataCollection = aliased(models.DataCollection) + query = query.outerjoin( + DataCollectionQueued, + models.ContainerQueueSample.dataCollectionPlanId + == DataCollectionQueued.dataCollectionPlanId, + ) + metadata["queued"] = func.IF( + func.count(models.ContainerQueueSample.containerQueueSampleId) + > func.count(DataCollectionQueued.dataCollectionId), 
+ True, + False, + ) + query = query.add_columns(metadata["queued"]) + else: + metadata["queued"] = literal_column("0") + query = query.add_columns(metadata["queued"]) + + query = with_authorization(query) + + if blSubSampleId: + query = query.filter(models.BLSubSample.blSubSampleId == blSubSampleId) if blSampleId: query = query.filter(models.BLSample.blSampleId == blSampleId) @@ -73,6 +336,57 @@ def get_samples( if proteinId: query = query.filter(models.Protein.proteinId == proteinId) + if containerId: + query = query.filter(models.Container.containerId == containerId) + + if proposal: + query = query.filter(models.Proposal.proposal == proposal) + + query = order(query, SUBSAMPLE_ORDER_BY_MAP, sort_order) + + total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + for result in results: + if result._metadata["types"]: + result._metadata["types"] = result._metadata["types"].split(",") + + return Paged(total=total, results=results, skip=skip, limit=limit) + + +def get_sample_images( + skip: int, + limit: int, + blSampleId: Optional[int] = None, + blSampleImageId: Optional[int] = None, +) -> Paged[models.BLSampleImage]: + metadata = { + "url": func.concat( + f"{settings.api_root}/samples/images/", + models.BLSampleImage.blSampleImageId, + ) + } + + query = ( + db.session.query(models.BLSampleImage, *metadata.values()) + .join(models.BLSample) + .join( + models.Container, + models.BLSample.containerId == models.Container.containerId, + ) + .join(models.Dewar, models.Container.dewarId == models.Dewar.dewarId) + .join(models.Shipping, models.Dewar.shippingId == models.Shipping.shippingId) + ) + + if blSampleId: + query = query.filter(models.BLSample.blSampleId == blSampleId) + + if blSampleImageId: + query = query.filter(models.BLSampleImage.blSampleImageId == blSampleImageId) + + query = with_authorization(query, proposalColumn=models.Shipping.proposalId) + total = query.count() query = 
page(query, skip=skip, limit=limit) results = with_metadata(query.all(), list(metadata.keys())) diff --git a/pyispyb/core/modules/sessions.py b/pyispyb/core/modules/sessions.py index 5a163d82..c2416f0b 100644 --- a/pyispyb/core/modules/sessions.py +++ b/pyispyb/core/modules/sessions.py @@ -1,9 +1,18 @@ +from datetime import datetime, timedelta from typing import Optional + from ispyb import models +from sqlalchemy import func, and_, or_, extract, distinct from sqlalchemy.orm import joinedload, contains_eager -from pyispyb.app.extensions.database.utils import Paged, page -from pyispyb.app.extensions.database.middleware import db -from pyispyb.core.modules.utils import encode_external_id + +from ...app.extensions.database.definitions import ( + beamlines_from_group, + groups_from_beamlines, + with_authorization, +) +from ...app.extensions.database.utils import Paged, page, with_metadata +from ...app.extensions.database.middleware import db +from ...core.modules.utils import encode_external_id def get_sessions( @@ -13,29 +22,55 @@ def get_sessions( externalId: Optional[int] = None, expSessionPk: Optional[int] = None, proposalId: Optional[int] = None, - sessionHasPerson: Optional[bool] = False, + proposal: Optional[str] = None, + session: Optional[str] = None, + beamLineName: Optional[str] = None, + beamLineGroup: Optional[str] = None, + scheduled: Optional[bool] = None, + upcoming: Optional[bool] = None, + previous: Optional[bool] = None, + sessionType: Optional[str] = None, + month: Optional[int] = None, + year: Optional[int] = None, + withAuthorization: bool = True, ) -> Paged[models.BLSession]: - - query = db.session.query(models.BLSession) - - if sessionHasPerson: - query = ( - query.outerjoin( - models.SessionHasPerson, - models.BLSession.sessionId == models.SessionHasPerson.sessionId, - ) - .options(contains_eager("SessionHasPerson")) - .outerjoin( - models.Person, - models.SessionHasPerson.personId == models.Person.personId, - ) - 
.options(contains_eager("SessionHasPerson.Person")) - .distinct() - ) + metadata = { + "active": func.IF( + and_( + models.BLSession.startDate <= datetime.now(), + models.BLSession.endDate >= datetime.now(), + ), + True, + False, + ), + "active_soon": func.IF( + and_( + models.BLSession.startDate <= datetime.now() - timedelta(minutes=20), + models.BLSession.endDate >= datetime.now() + timedelta(minutes=20), + ), + True, + False, + ), + "sessionTypes": func.group_concat(distinct(models.SessionType.typeName)), + "persons": func.count(models.SessionHasPerson.personId), + } + + query = ( + db.session.query(models.BLSession, *metadata.values()) + .outerjoin(models.SessionType) + .join(models.Proposal) + .outerjoin(models.SessionHasPerson) + .options(contains_eager(models.BLSession.Proposal)) + .order_by(models.BLSession.startDate.desc()) + .group_by(models.BLSession.sessionId) + ) if sessionId: query = query.filter(models.BLSession.sessionId == sessionId) + if session: + query = query.filter(models.BLSession.session == session) + if externalId: externalId = encode_external_id(externalId) query = query.filter(models.BLSession.externalId == externalId) @@ -46,10 +81,112 @@ def get_sessions( if proposalId: query = query.filter(models.BLSession.proposalId == proposalId) + if proposal: + query = query.filter(models.Proposal.proposal == proposal) + + if beamLineName: + query = query.filter(models.BLSession.beamLineName == beamLineName) + + if scheduled: + query = query.filter(models.BLSession.scheduled == 1) + + if upcoming: + query = query.filter(models.BLSession.endDate >= datetime.now()) + query = query.order_by(models.BLSession.startDate) + + if previous: + query = query.filter(models.BLSession.endDate < datetime.now()) + + if sessionType: + query = query.filter(models.SessionType.typeName == sessionType) + + if month: + query = query.filter( + or_( + extract("month", models.BLSession.startDate) == month, + extract("month", models.BLSession.endDate) == month, + ) + ) + + 
if year: + query = query.filter( + or_( + extract("year", models.BLSession.startDate) == year, + extract("year", models.BLSession.endDate) == year, + ) + ) + + if beamLineGroup: + query = query.filter( + models.BLSession.beamLineName.in_(beamlines_from_group(beamLineGroup)) + ) + + if withAuthorization: + query = with_authorization(query, joinBLSession=False) + total = query.count() query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) - return Paged(total=total, results=query.all(), skip=skip, limit=limit) + dataCollections = ( + db.session.query( + func.count(models.DataCollection.dataCollectionId).label("count"), + models.DataCollectionGroup.sessionId, + ) + .join(models.DataCollectionGroup) + .filter( + models.DataCollectionGroup.sessionId.in_( + [result.sessionId for result in results] + ) + ) + .group_by(models.DataCollectionGroup.sessionId) + .all() + ) + dataCollectionCount = {} + for dataCollection in dataCollections: + dataCollectionDict = dataCollection._asdict() + dataCollectionCount[dataCollectionDict["sessionId"]] = dataCollectionDict[ + "count" + ] + + for result in results: + result._metadata["uiGroups"] = groups_from_beamlines([result.beamLineName]) + result._metadata["datacollections"] = dataCollectionCount.get( + result.sessionId, 0 + ) + result._metadata["sessionTypes"] = ( + result._metadata["sessionTypes"].split(",") + if result._metadata["sessionTypes"] + else [] + ) + + return Paged(total=total, results=results, skip=skip, limit=limit) + + +def get_sessions_for_beamline_group( + beamLineGroup: Optional[str], + upcoming: Optional[bool] = None, + previous: Optional[bool] = None, + sessionType: Optional[str] = None, +) -> Paged[models.BLSession]: + beamLines = beamlines_from_group(beamLineGroup) + if not beamLines: + return Paged(total=0, results=[], skip=0, limit=0) + + sessions = [] + for beamLine in beamLines: + beamline_sessions = get_sessions( + skip=0, + limit=1, + beamLineName=beamLine, 
+ upcoming=upcoming, + previous=previous, + sessionType=sessionType, + ) + + sessions.extend(beamline_sessions.results) + + return Paged(total=len(sessions), results=sessions, skip=0, limit=len(sessions)) def get_sessionHasPerson( diff --git a/pyispyb/core/routes/datacollections.py b/pyispyb/core/routes/datacollections.py new file mode 100644 index 00000000..2482c7ab --- /dev/null +++ b/pyispyb/core/routes/datacollections.py @@ -0,0 +1,184 @@ +import logging +import os +from typing import Optional + +from fastapi import Depends, HTTPException, Query +from fastapi.responses import FileResponse +from pydantic import conint +from ispyb import models + +from ...config import settings +from ...dependencies import pagination +from ...app.extensions.database.utils import Paged +from ... import filters +from ...app.base import AuthenticatedAPIRouter + +from ..modules import datacollections as crud +from ..schemas import datacollections as schema +from ..schemas.utils import paginated + + +logger = logging.getLogger(__name__) +router = AuthenticatedAPIRouter(prefix="/datacollections", tags=["Data Collections"]) + + +@router.get("/images/diffraction/{dataCollectionId}", response_class=FileResponse) +def get_datacollection_diffraction_image( + dataCollectionId: int, + snapshot: bool = Query(False, description="Get snapshot image"), +) -> str: + """Get a data collection diffraction image""" + path = crud.get_datacollection_diffraction_image_path( + dataCollectionId, + snapshot, + ) + if not path: + raise HTTPException(status_code=404, detail="Image not found") + + return path + + +@router.get("/images/quality/{dataCollectionId}", response_class=FileResponse) +def get_datacollection_anaylsis_image( + dataCollectionId: int, +) -> str: + """Get a data collection per image analysis image""" + path = crud.get_datacollection_analysis_image_path( + dataCollectionId, + ) + if not path: + raise HTTPException(status_code=404, detail="Image not found") + + return path + + 
+@router.get("/images/{dataCollectionId}", response_class=FileResponse) +def get_datacollection_image( + dataCollectionId: int, + imageId: conint(ge=1, le=4) = Query(1, description="Image 1-4 to return"), + snapshot: bool = Query(False, description="Get snapshot image"), +) -> str: + """Get a data collection image""" + path = crud.get_datacollection_snapshot_path( + dataCollectionId, + imageId, + snapshot, + ) + if not path: + raise HTTPException(status_code=404, detail="Image not found") + + return path + + +@router.get( + "/attachments", response_model=paginated(schema.DataCollectionFileAttachment) +) +def get_datacollection_attachments( + page: dict[str, int] = Depends(pagination), + dataCollectionId: int = Depends(filters.dataCollectionId), + dataCollectionGroupId: int = Depends(filters.dataCollectionGroupId), +) -> Paged[models.DataCollectionFileAttachment]: + """Get a list of data collection attachments""" + return crud.get_datacollection_attachments( + dataCollectionId=dataCollectionId, + dataCollectionGroupId=dataCollectionGroupId, + **page, + ) + + +@router.get( + "/attachments/{dataCollectionFileAttachmentId}", + response_class=FileResponse, + responses={404: {"description": "No such data collection attachment"}}, +) +def get_datacollection_attachment( + dataCollectionFileAttachmentId: int, +): + """Get a data collection attachment""" + attachments = crud.get_datacollection_attachments( + dataCollectionFileAttachmentId=dataCollectionFileAttachmentId, + skip=0, + limit=1, + ) + + try: + attachment = attachments.first + file_path = attachment.fileFullPath + if settings.path_map: + file_path = settings.path_map + file_path + + if not os.path.exists(file_path): + logger.warning( + f"dataCollectionFileAttachmentId `{attachment.dataCollectionFileAttachmentId}` file `{file_path}` does not exist on disk" + ) + raise IndexError + return FileResponse(file_path, filename=attachment._metadata["fileName"]) + except IndexError: + raise HTTPException( + status_code=404, 
detail="Data collection attachment not found" + ) + + +@router.get( + "/quality", + response_model=paginated(schema.PerImageAnalysis), + responses={404: {"description": "A list of per image/point analysis"}}, +) +def get_per_image_analysis( + page: dict[str, int] = Depends(pagination), + dataCollectionId: int = Depends(filters.dataCollectionId), + dataCollectionGroupId: int = Depends(filters.dataCollectionGroupId), +) -> Paged[schema.PerImageAnalysis]: + """Get a list of per image/point analysis""" + return crud.get_per_image_analysis( + dataCollectionId=dataCollectionId, + dataCollectionGroupId=dataCollectionGroupId, + **page, + ) + + +@router.get("/workflows/steps", response_model=paginated(schema.WorkflowStep)) +def get_workflow_steps( + page: dict[str, int] = Depends(pagination), + workflowId: Optional[int] = Query(None, title="Workflow id"), + workflowStepId: Optional[int] = Query(None, title="Workflow step id"), +) -> Paged[models.WorkflowStep]: + """Get a list of workflow steps""" + return crud.get_workflow_steps( + workflowId=workflowId, + workflowStepId=workflowStepId, + **page, + ) + + +@router.get( + "/workflows/steps/{workflowStepId}", + response_class=FileResponse, + responses={404: {"description": "No such workflow step attachment"}}, +) +def get_workflow_step_attachment( + workflowStepId: int, attachmentType: schema.WorkflowStepAttachment +): + """Get a workflow step attachment""" + steps = crud.get_workflow_steps( + workflowStepId=workflowStepId, + skip=0, + limit=1, + ) + + try: + steps: models.WorkflowStep = steps.first + file_path = getattr(steps, attachmentType) + if settings.path_map: + file_path = settings.path_map + file_path + + if not os.path.exists(file_path): + logger.warning( + f"workflowStep.{attachmentType} `{workflowStepId}` file `{file_path}` does not exist on disk" + ) + raise IndexError + return FileResponse(file_path, filename=os.path.basename(file_path)) + except IndexError: + raise HTTPException( + status_code=404, 
detail="Workflow step attachment not found" + ) diff --git a/pyispyb/core/routes/events.py b/pyispyb/core/routes/events.py index f32063c2..c2ff2455 100644 --- a/pyispyb/core/routes/events.py +++ b/pyispyb/core/routes/events.py @@ -1,12 +1,10 @@ -from fastapi import Depends, Query, HTTPException -from fastapi.responses import FileResponse -from pydantic import conint +from typing import Optional +from fastapi import Depends -from pyispyb.app.extensions.database.definitions import get_blsession -from pyispyb.app.extensions.database.utils import Paged -from pyispyb.dependencies import pagination -from pyispyb import filters -from pyispyb.app.base import AuthenticatedAPIRouter +from ...app.extensions.database.utils import Paged +from ...dependencies import pagination +from ... import filters +from ...app.base import AuthenticatedAPIRouter from ..modules import events as crud from ..schemas import events as schema @@ -16,44 +14,55 @@ @router.get( - "/", + "", response_model=paginated(schema.Event), responses={404: {"description": "Entity not found"}}, ) def get_events( page: dict[str, int] = Depends(pagination), session: str = Depends(filters.session), + sessionId: int = Depends(filters.sessionId), + proposal: str = Depends(filters.proposal), + proposalId: str = Depends(filters.proposalId), + beamLineName: str = Depends(filters.beamLineName), + dataCollectionId: int = Depends(filters.dataCollectionId), dataCollectionGroupId: int = Depends(filters.dataCollectionGroupId), blSampleId: int = Depends(filters.blSampleId), proteinId: int = Depends(filters.proteinId), + status: crud.EventStatus = None, + eventType: Optional[str] = None, ) -> Paged[schema.Event]: """Get a list of events""" - sessionId = None - if session: - blSession = get_blsession(session) - if not blSession: - raise HTTPException(status_code=404, detail="Session not found") - sessionId = blSession.sessionId - return crud.get_events( - # sessionId is an int - sessionId=sessionId, # type: ignore + 
session=session, + sessionId=sessionId, + proposal=proposal, + proposalId=proposalId, + beamLineName=beamLineName, + dataCollectionId=dataCollectionId, dataCollectionGroupId=dataCollectionGroupId, blSampleId=blSampleId, proteinId=proteinId, + status=status, + eventType=eventType, **page ) -@router.get("/image/{dataCollectionId}", response_class=FileResponse) -def get_datacollection_image( - dataCollectionId: int, - imageId: conint(ge=1, le=4) = Query(0, description="Image 1-4 to return"), - snapshot: bool = Query(False, description="Get snapshot image"), -) -> str: - """Get a data collection image""" - path = crud.get_datacollection_snapshot_path(dataCollectionId, imageId, snapshot) - if not path: - raise HTTPException(status_code=404, detail="Image not found") - - return path +@router.get( + "/types", + response_model=paginated(schema.EventType), +) +def get_event_types( + session: str = Depends(filters.session), + sessionId: int = Depends(filters.sessionId), + blSampleId: int = Depends(filters.blSampleId), + proteinId: int = Depends(filters.proteinId), +) -> Paged[schema.EventType]: + """Get a list of event types""" + return crud.get_event_types( + session=session, + sessionId=sessionId, + blSampleId=blSampleId, + proteinId=proteinId, + ) diff --git a/pyispyb/core/routes/labcontacts.py b/pyispyb/core/routes/labcontacts.py index 7453ad48..57ae1cc9 100644 --- a/pyispyb/core/routes/labcontacts.py +++ b/pyispyb/core/routes/labcontacts.py @@ -1,24 +1,29 @@ +import logging + from fastapi import Depends, HTTPException, status from ispyb import models from pyispyb.dependencies import pagination from pyispyb.app.extensions.database.utils import Paged from pyispyb.app.base import AuthenticatedAPIRouter +from pyispyb import filters from ..modules import labcontacts as crud from ..schemas import labcontacts as schema -from ..schemas.utils import paginated +from ..schemas.utils import paginated, make_optional +logger = logging.getLogger(__name__) router = 
AuthenticatedAPIRouter(prefix="/labcontacts", tags=["Lab Contacts"]) -@router.get("/", response_model=paginated(schema.LabContact)) def get_lab_contacts( - page: dict[str, int] = Depends(pagination) + proposal: str = Depends(filters.proposal), + page: dict[str, int] = Depends(pagination), ) -> Paged[models.LabContact]: """Get a list of lab contacts""" - return crud.get_labcontacts(**page) + return crud.get_labcontacts(proposal=proposal, **page) @router.get( @@ -27,8 +32,12 @@ def get_lab_contacts( responses={404: {"description": "No such contact"}}, ) def get_lab_contact(labContactId: int) -> models.LabContact: - """Get a list of lab contacts""" - users = crud.get_labcontacts(labContactId=labContactId, skip=0, limit=1) + """Get a lab contact""" + users = crud.get_labcontacts( + labContactId=labContactId, + skip=0, + limit=1, + ) try: return users.first except IndexError: @@ -36,10 +45,52 @@ def get_lab_contact(labContactId: int) -> models.LabContact: @router.post( - "/", + "", response_model=schema.LabContact, status_code=status.HTTP_201_CREATED, ) def create_lab_contact(labcontact: schema.LabContactCreate) -> models.LabContact: """Create a new lab contact""" - return crud.create_labcontact(labcontact=labcontact) + try: + return crud.create_labcontact( + labcontact=labcontact, + ) + except IndexError: + raise HTTPException(status_code=404, detail="No such proposal") + + +LABCONTACT_UPDATE_EXCLUDED = { + "proposalId": True, + "Person": { + "givenName": True, + "familyname": True, + "Laboratory": {"laboratoryExtPk": True}, + }, +} + + +@router.patch( + "/{labContactId}", + response_model=schema.LabContact, + responses={ + 404: {"description": "No such lab contact"}, + 400: {"description": "Could not update lab contact"}, + }, +) +def update_lab_contact( + labContactId: int, + labContact: make_optional( + schema.LabContactCreate, + exclude=LABCONTACT_UPDATE_EXCLUDED, + ), +): + """Update a Lab Contact""" + 
try: + return crud.update_labcontact(labContactId, labContact) + except IndexError: + raise HTTPException(status_code=404, detail="Lab contact not found") + except Exception: + logger.exception( + f"Could not update labcontact `{labContactId}` with payload `{labContact}`" + ) + raise HTTPException(status_code=400, detail="Could not update lab contact") diff --git a/pyispyb/core/routes/legacy/data_collections.py b/pyispyb/core/routes/legacy/data_collections.py index 971300c6..5fab0bcc 100644 --- a/pyispyb/core/routes/legacy/data_collections.py +++ b/pyispyb/core/routes/legacy/data_collections.py @@ -27,7 +27,8 @@ from .base import router as legacy_router router = AuthenticatedAPIRouter( - prefix="/data_collections", tags=["Data collections - legacy with header token"] + prefix="/legacy/data_collections", + tags=["Data collections - legacy with header token"], ) diff --git a/pyispyb/core/routes/legacy/em.py b/pyispyb/core/routes/legacy/em.py index a3206faa..7b0a83af 100644 --- a/pyispyb/core/routes/legacy/em.py +++ b/pyispyb/core/routes/legacy/em.py @@ -13,7 +13,9 @@ from .base import router as legacy_router -router = AuthenticatedAPIRouter(prefix="/em", tags=["EM - legacy with header token"]) +router = AuthenticatedAPIRouter( + prefix="/legacy/em", tags=["EM - legacy with header token"] +) ############################ diff --git a/pyispyb/core/routes/legacy/proposals.py b/pyispyb/core/routes/legacy/proposals.py index efc5df18..c7771c0c 100644 --- a/pyispyb/core/routes/legacy/proposals.py +++ b/pyispyb/core/routes/legacy/proposals.py @@ -32,7 +32,7 @@ from .base import router as legacy_router router = AuthenticatedAPIRouter( - prefix="/proposals", tags=["Proposals - legacy with header token"] + prefix="/legacy/proposals", tags=["Proposals - legacy with header token"] ) diff --git a/pyispyb/core/routes/legacy/sessions.py b/pyispyb/core/routes/legacy/sessions.py index 2e8c518b..8c991668 100644 --- a/pyispyb/core/routes/legacy/sessions.py +++ 
b/pyispyb/core/routes/legacy/sessions.py @@ -32,7 +32,7 @@ from .base import router as legacy_router router = AuthenticatedAPIRouter( - prefix="/sessions", tags=["Sessions - legacy with header token"] + prefix="/legacy/sessions", tags=["Sessions - legacy with header token"] ) diff --git a/pyispyb/core/routes/proposals.py b/pyispyb/core/routes/proposals.py new file mode 100644 index 00000000..73e400cc --- /dev/null +++ b/pyispyb/core/routes/proposals.py @@ -0,0 +1,44 @@ +from fastapi import Depends, HTTPException +from ispyb import models + +from pyispyb.dependencies import pagination +from pyispyb.app.extensions.database.utils import Paged +from pyispyb import filters +from pyispyb.app.base import AuthenticatedAPIRouter + +from ..modules import proposals as crud +from ..schemas import proposals as schema +from ..schemas.utils import paginated + + +router = AuthenticatedAPIRouter(prefix="/proposals", tags=["Proposals"]) + + +@router.get("", response_model=paginated(schema.Proposal)) +def get_proposals( + search: str = Depends(filters.search), + page: dict[str, int] = Depends(pagination), +) -> Paged[models.Proposal]: + """Get a list of proposals""" + return crud.get_proposals(search=search, **page) + + +@router.get( + "/{proposal}", + response_model=schema.Proposal, + responses={404: {"description": "No such proposal"}}, +) +def get_proposal( + proposal: str = Depends(filters.proposal), +) -> models.Proposal: + """Get a proposal""" + proposals = crud.get_proposals( + proposal=proposal, + skip=0, + limit=1, + ) + + try: + return proposals.first + except IndexError: + raise HTTPException(status_code=404, detail="Proposal not found") diff --git a/pyispyb/core/routes/proteins.py b/pyispyb/core/routes/proteins.py new file mode 100644 index 00000000..079a6369 --- /dev/null +++ b/pyispyb/core/routes/proteins.py @@ -0,0 +1,53 @@ +from fastapi import Depends, HTTPException +from ispyb import models + +from ...dependencies import order_by_factory, pagination +from 
...app.extensions.database.utils import Paged +from ... import filters +from ...app.base import AuthenticatedAPIRouter + +from ..modules import proteins as crud +from ..schemas import protein as schema +from ..schemas.utils import paginated + + +router = AuthenticatedAPIRouter(prefix="/proteins", tags=["Proteins"]) + + +@router.get("", response_model=paginated(schema.Protein)) +def get_proteins( + page: dict[str, int] = Depends(pagination), + proteinId: int = Depends(filters.proteinId), + proposal: str = Depends(filters.proposal), + search: str = Depends(filters.search), + sort_order: dict = Depends(order_by_factory(crud.ORDER_BY_MAP, "ProteinOrder")), +) -> Paged[models.BLSample]: + """Get a list of proteins""" + return crud.get_proteins( + proteinId=proteinId, + proposal=proposal, + search=search, + sort_order=sort_order, + **page + ) + + +@router.get( + "/{proteinId}", + response_model=schema.Protein, + responses={404: {"description": "No such protein"}}, +) +def get_protein( + proteinId: int = Depends(filters.proteinId), +) -> models.Protein: + """Get a protein""" + proteins = crud.get_proteins( + proteinId=proteinId, + skip=0, + limit=1, + ) + + try: + return proteins.first + except IndexError: + raise HTTPException(status_code=404, detail="Protein not found") diff --git a/pyispyb/core/routes/samples.py b/pyispyb/core/routes/samples.py index fc73f860..b770940a 100644 --- a/pyispyb/core/routes/samples.py +++ b/pyispyb/core/routes/samples.py @@ -1,26 +1,131 @@ +import logging +import os +from typing import Optional + from fastapi import Depends, HTTPException +from fastapi.responses import FileResponse from ispyb import models -from pyispyb.dependencies import pagination -from pyispyb.app.extensions.database.utils import Paged -from pyispyb import filters -from pyispyb.app.base import AuthenticatedAPIRouter - +from ...config import settings +from ...dependencies import pagination, order_by_factory +from ...app.extensions.database.utils import Paged +from ... 
import filters +from ...app.base import AuthenticatedAPIRouter from ..modules import samples as crud from ..schemas import samples as schema from ..schemas.utils import paginated - +logger = logging.getLogger(__name__) router = AuthenticatedAPIRouter(prefix="/samples", tags=["Samples"]) -@router.get("/", response_model=paginated(schema.Sample)) +@router.get("/sub", response_model=paginated(schema.SubSample)) +def get_subsamples( + page: dict[str, int] = Depends(pagination), + blSampleId: int = Depends(filters.blSampleId), + proteinId: int = Depends(filters.proteinId), + proposal: str = Depends(filters.proposal), + containerId: int = Depends(filters.containerId), + sort_order: dict = Depends( + order_by_factory(crud.SUBSAMPLE_ORDER_BY_MAP, "SubSampleOrder") + ), +) -> Paged[models.BLSubSample]: + """Get a list of sub samples""" + return crud.get_subsamples( + blSampleId=blSampleId, + proteinId=proteinId, + proposal=proposal, + containerId=containerId, + sort_order=sort_order, + **page, + ) + + +@router.get( + "/sub/{blSubSampleId}", + response_model=schema.SubSample, + responses={404: {"description": "No such sub sample"}}, +) +def get_subsample( + blSubSampleId: int = Depends(filters.blSubSampleId), +) -> models.BLSubSample: + """Get a sub sample""" + subsamples = crud.get_subsamples( + blSubSampleId=blSubSampleId, + skip=0, + limit=1, + ) + + try: + return subsamples.first + except IndexError: + raise HTTPException(status_code=404, detail="Sub sample not found") + + +@router.get("/images", response_model=paginated(schema.SampleImage)) +def get_sample_images( + page: dict[str, int] = Depends(pagination), + blSampleId: int = Depends(filters.blSampleId), +) -> Paged[models.BLSampleImage]: + """Get a list of sample images""" + return crud.get_sample_images( + blSampleId=blSampleId, + **page, + ) + + +@router.get("/images/{blSampleImageId}", response_class=FileResponse) +def get_sample_image( + blSampleImageId: int, +): + """Get a sample image""" + sampleimages = 
crud.get_sample_images( + blSampleImageId=blSampleImageId, + limit=1, + skip=0, + ) + + try: + sampleimage = sampleimages.first + image_path = sampleimage.imageFullPath + if settings.path_map: + image_path = settings.path_map + image_path + + if not os.path.exists(image_path): + logger.warning( + f"blSampleImageId `{sampleimage.blSampleImageId}` file `{image_path}` does not exist on disk" + ) + raise IndexError + return image_path + + except IndexError: + raise HTTPException(status_code=404, detail="Sample image not found") + + +@router.get("", response_model=paginated(schema.Sample)) def get_samples( page: dict[str, int] = Depends(pagination), + search: str = Depends(filters.search), proteinId: int = Depends(filters.proteinId), + proposal: str = Depends(filters.proposal), + containerId: int = Depends(filters.containerId), + beamLineName: str = Depends(filters.beamLineName), + status: Optional[crud.SAMPLE_STATUS_ENUM] = None, + sort_order: dict = Depends( + order_by_factory(crud.SAMPLE_ORDER_BY_MAP, "SampleOrder") + ), ) -> Paged[models.BLSample]: """Get a list of samples""" - return crud.get_samples(proteinId=proteinId, **page) + return crud.get_samples( + search=search, + proteinId=proteinId, + proposal=proposal, + containerId=containerId, + beamLineName=beamLineName, + status=status, + sort_order=sort_order, + **page, + ) @router.get( @@ -31,8 +136,12 @@ def get_samples( def get_sample( blSampleId: int = Depends(filters.blSampleId), ) -> models.BLSample: - """Get a samples""" - samples = crud.get_samples(blSampleId=blSampleId, skip=0, limit=1) + """Get a sample""" + samples = crud.get_samples( + blSampleId=blSampleId, + skip=0, + limit=1, + ) try: return samples.first diff --git a/pyispyb/core/routes/sessions.py b/pyispyb/core/routes/sessions.py new file mode 100644 index 00000000..fc879779 --- /dev/null +++ b/pyispyb/core/routes/sessions.py @@ -0,0 +1,94 @@ +from typing import Optional + +from fastapi import Depends, HTTPException, Query +from ispyb import 
models + +from pyispyb.dependencies import pagination +from pyispyb.app.extensions.database.utils import Paged +from pyispyb import filters +from pyispyb.app.base import AuthenticatedAPIRouter + +from ..modules import sessions as crud +from ..schemas import sessions as schema +from ..schemas.utils import paginated + + +router = AuthenticatedAPIRouter(prefix="/sessions", tags=["Sessions"]) +PaginatedSession = paginated(schema.Session) + + +@router.get("", response_model=PaginatedSession) +def get_sessions( + proposal: str = Depends(filters.proposal), + beamLineName: str = Depends(filters.beamLineName), + beamLineGroup: Optional[str] = Query( + None, description="Show sessions for a beamLineGroup" + ), + scheduled: bool = Query(None, description="Get scheduled sessions only"), + upcoming: Optional[bool] = Query(False, description="Get the upcoming sessions"), + previous: Optional[bool] = Query( + False, description="Get the recently finished sessions" + ), + sessionType=Query( + None, description="Filter by session type, i.e. commissioning, remote" + ), + month: int = Depends(filters.month), + year: int = Depends(filters.year), + page: dict[str, int] = Depends(pagination), +) -> Paged[models.BLSession]: + """Get a list of sessions""" + return crud.get_sessions( + proposal=proposal, + beamLineName=beamLineName, + beamLineGroup=beamLineGroup, + scheduled=scheduled, + upcoming=upcoming, + previous=previous, + sessionType=sessionType, + month=month, + year=year, + **page + ) + + +@router.get("/group", response_model=PaginatedSession) +def get_sessions_for_beamline_group( + beamLineGroup: str = Query(description="Beamline group to display session for"), + upcoming: Optional[bool] = Query(False, description="Get the upcoming sessions"), + previous: Optional[bool] = Query( + False, description="Get the recently finished sessions" + ), + sessionType=Query( + None, description="Filter by session type, i.e. 
commissioning, remote" + ), +): + """Get a list of sessions for a beamline group + Displays one session per beamline + """ + return crud.get_sessions_for_beamline_group( + beamLineGroup=beamLineGroup, + upcoming=upcoming, + previous=previous, + sessionType=sessionType, + ) + + +@router.get( + "/{sessionId}", + response_model=schema.Session, + responses={404: {"description": "No such session"}}, +) +def get_session( + sessionId: str = Depends(filters.sessionId), +) -> models.BLSession: + """Get a session""" + sessions = crud.get_sessions( + sessionId=sessionId, + skip=0, + limit=1, + ) + + try: + return sessions.first + except IndexError: + raise HTTPException(status_code=404, detail="Session not found") diff --git a/pyispyb/core/routes/user.py b/pyispyb/core/routes/user.py index f3b1dd1d..812f689c 100644 --- a/pyispyb/core/routes/user.py +++ b/pyispyb/core/routes/user.py @@ -1,6 +1,8 @@ from pydantic import BaseModel -from ...app.extensions.database.definitions import get_current_person +from pyispyb.app.extensions.options.schema import BeamLineGroup + +from ...app.extensions.database.definitions import get_current_person, get_options from ...app.base import AuthenticatedAPIRouter from ...app.globals import g @@ -12,6 +14,8 @@ class CurrentUser(BaseModel): familyName: str Permissions: list[str] personId: int + beamLineGroups: list[str] + beamLines: list[str] @router.get( @@ -20,10 +24,22 @@ class CurrentUser(BaseModel): ) def current_user() -> CurrentUser: person = get_current_person(g.login) + db_options = get_options() + beamLineGroups: list[BeamLineGroup] = db_options.beamLineGroups + groups = [] + beamLines = [] + for beamLineGroup in beamLineGroups: + if beamLineGroup.permission in g.permissions: + groups.append(beamLineGroup.groupName) + beamLines.extend( + [beamLine.beamLineName for beamLine in beamLineGroup.beamLines] + ) return { "personId": person.personId, "givenName": person.givenName, "familyName": person.familyName, "Permissions": g.permissions, + 
"beamLineGroups": groups, + "beamLines": list(set(beamLines)), } diff --git a/pyispyb/core/schemas/crystal.py b/pyispyb/core/schemas/crystal.py index 66c900b1..f7c018e6 100644 --- a/pyispyb/core/schemas/crystal.py +++ b/pyispyb/core/schemas/crystal.py @@ -44,6 +44,7 @@ class CrystalBase(BaseModel): class Crystal(CrystalBase): crystalId: int + proteinId: int = Field(title="Protein") Protein: Protein diff --git a/pyispyb/core/schemas/datacollections.py b/pyispyb/core/schemas/datacollections.py new file mode 100644 index 00000000..4d6d65cb --- /dev/null +++ b/pyispyb/core/schemas/datacollections.py @@ -0,0 +1,185 @@ +# import datetime + +import enum +from typing import Optional + +from pydantic import BaseModel, Field + + +class Workflow(BaseModel): + workflowId: int + comments: Optional[str] + status: Optional[str] + workflowTitle: Optional[str] + workflowType: Optional[str] + + class Config: + orm_mode = True + + +class WorkflowStepAttachment(str, enum.Enum): + imageResultFilePath = "imageResultFilePath" + # htmlResultFilePath = "htmlResultFilePath" + resultFilePath = "resultFilePath" + + +class WorkflowStepMetaData(BaseModel): + attachments: dict[str, bool] = Field(description="Attachment statuses") + + +class WorkflowStep(BaseModel): + workflowId: int + workflowStepId: int + workflowStepType: Optional[str] + status: Optional[str] + comments: Optional[str] + + metadata: WorkflowStepMetaData = Field(alias="_metadata") + + class Config: + orm_mode = True + + +class DataCollectionGroup(BaseModel): + dataCollectionGroupId: int + experimentType: str + + Workflow: Optional[Workflow] + + class Config: + orm_mode = True + + +class GridInfo(BaseModel): + gridInfoId: int + + xOffset: Optional[float] + yOffset: Optional[float] + dx_mm: Optional[float] + dy_mm: Optional[float] + steps_x: Optional[float] + steps_y: Optional[float] + meshAngle: Optional[float] + orientation: Optional[str] + pixelsPerMicronX: Optional[float] + pixelsPerMicronY: Optional[float] + 
snapshot_offsetXPixel: Optional[float] + snapshot_offsetYPixel: Optional[float] + snaked: Optional[bool] + + class Config: + orm_mode = True + + +class DataCollectionMetaData(BaseModel): + snapshots: dict[str, bool] = Field(description="Snapshot statuses with ids 1-4") + + +class RotationAxis(str, enum.Enum): + omega = "omega" + phi = "phi" + + +class DataCollectionBase(BaseModel): + runStatus: Optional[str] = Field( + title="Status", description="`Successful` on success" + ) + + imageDirectory: Optional[str] = Field( + title="Directory", description="Directory where the data is saved" + ) + fileTemplate: Optional[str] = Field( + title="Data File Template", description="File template for data" + ) + imageContainerSubPath: Optional[str] = Field( + title="Image Sub Path", description="For hdf5 files, path to the images" + ) + numberOfImages: Optional[int] = Field(title="Number of Images / Points") + + wavelength: Optional[float] = Field(title="Wavelength", unit="Å") + exposureTime: Optional[float] = Field(title="Exposure Time", unit="s") + flux: Optional[float] = Field(title="Flux", unit="ph/s") + xBeam: Optional[float] = Field(title="Beam Position (Horizontal)", unit="pixels") + yBeam: Optional[float] = Field(title="Beam Position (Vertical)", unit="pixels") + beamSizeAtSampleX: Optional[float] = Field( + title="Beam Size at Sample (Horizontal)", unit="mm" + ) + beamSizeAtSampleY: Optional[float] = Field( + title="Beam Size at Sample (Vertical)", unit="mm" + ) + transmission: Optional[float] = Field(title="Beam Transmision", unit="%") + resolution: Optional[float] = Field( + title="Resolution", description="At edge of detector", unit="Å" + ) + detectorDistance: Optional[float] = Field(title="Detector Distance", unit="mm") + + axisStart: Optional[float] = Field(title="Rotation Axis Start", unit="°") + axisEnd: Optional[float] = Field(title="Rotation Axis End", unit="°") + axisRange: Optional[float] = Field(title="Rotation Axis Oscillation", unit="°") + rotationAxis: 
Optional[str] = Field(title="Rotation Axis Motor") + overlap: Optional[float] = Field(title="Rotation Axis Overlap", unit="°") + + phiStart: Optional[float] = Field(title="Phi Start", unit="°") + kappaStart: Optional[float] = Field(title="Kappa Start", unit="°") + omegaStart: Optional[float] = Field(title="Omega Start", unit="°") + chiStart: Optional[float] = Field(title="Chi Start", unit="°") + + xBeamPix: Optional[float] = Field(title="Beam size X", unit="pixels") + yBeamPix: Optional[float] = Field(title="Beam size Y", unit="pixels") + + # EM + magnification: Optional[int] = Field(title="Magnification", unit="x") + binning: Optional[int] = Field(title="Binning") + particleDiameter: Optional[float] = Field(title="Particle Diameter", unit="nm") + # boxSize_CTF: Optional[float] = Field(unit="pixels") + # minResolution: Optional[float] = Field(unit="A") + # minDefocus: Optional[float] = Field(unit="A") + # maxDefocus: Optional[float] = Field(unit="A") + defocusStepSize: Optional[float] = Field(unit="A") + amountAstigmatism: Optional[float] = Field(unit="A") + # extractSize: Optional[float] = Field(unit="pixels") + # bgRadius: Optional[float] = Field(unit="nm") + voltage: Optional[float] = Field(unit="kV") + objAperture: Optional[float] = Field(unit="um") + # c1aperture: Optional[float] = Field(unit="um") + # c2aperture: Optional[float] = Field(unit="um") + # c3aperture: Optional[float] = Field(unit="um") + # c1lens: Optional[float] = Field(unit="%") + # c2lens: Optional[float] = Field(unit="%") + # c3lens: Optional[float] = Field(unit="%") + + +class DataCollection(DataCollectionBase): + dataCollectionId: int + + DataCollectionGroup: DataCollectionGroup + GridInfo: Optional[list[GridInfo]] + + metadata: DataCollectionMetaData = Field(alias="_metadata") + + class Config: + orm_mode = True + + +class DataCollectionFileAttachmentMetaData(BaseModel): + url: str = Field(description="Url to data collection file attachment") + fileName: str = Field(description="File name") 
+ + +class DataCollectionFileAttachment(BaseModel): + dataCollectionFileAttachmentId: int + dataCollectionId: int + fileType: str + + metadata: DataCollectionFileAttachmentMetaData = Field(alias="_metadata") + + class Config: + orm_mode = True + + +class PerImageAnalysis(BaseModel): + dataCollectionId: Optional[int] + imageNumber: Optional[list[int]] = Field(description="Scan point") + totalIntegratedSignal: Optional[list[float]] = Field(description="Total signal") + goodBraggCandidates: Optional[list[int]] = Field(description="Number of spots") + method2Res: Optional[list[float]] = Field(description="Estimated resolution") diff --git a/pyispyb/core/schemas/energyscan.py b/pyispyb/core/schemas/energyscan.py new file mode 100644 index 00000000..8d7b88ea --- /dev/null +++ b/pyispyb/core/schemas/energyscan.py @@ -0,0 +1,8 @@ +from pydantic import BaseModel + + +class EnergyScan(BaseModel): + energyScanId: int + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/events.py b/pyispyb/core/schemas/events.py index 259c9b3c..9f30f390 100644 --- a/pyispyb/core/schemas/events.py +++ b/pyispyb/core/schemas/events.py @@ -1,70 +1,36 @@ -# import datetime - -from typing import Union, Optional from datetime import datetime +from typing import Union, Optional from pydantic import BaseModel, Field -from ispyb import models - -d = models.DataCollection -g = models.DataCollectionGroup -x = models.XFEFluorescenceSpectrum -r = models.RobotAction - - -class DataCollectionGroup(BaseModel): - dataCollectionGroupId: int - experimentType: str - - class Config: - orm_mode = True - - -class DataCollectionMetaData(BaseModel): - snapshots: dict[str, bool] = Field(description="Snapshot statuses with ids 1-4") - -class DataCollection(BaseModel): - runStatus: Optional[str] - wavelength: Optional[float] - exposureTime: Optional[float] - numberOfImages: Optional[int] - imageDirectory: Optional[str] - fileTemplate: Optional[str] - imageContainerSubPath: Optional[str] - 
beamSizeAtSampleX: Optional[float] - beamSizeAtSampleY: Optional[float] - - DataCollectionGroup: DataCollectionGroup - - metadata: DataCollectionMetaData = Field(alias="_metadata") - - class Config: - orm_mode = True - - -class RobotAction(BaseModel): - actionType: str - status: Optional[str] - message: Optional[str] - - class Config: - orm_mode = True +from .datacollections import DataCollection +from .robotactions import RobotAction +from .energyscan import EnergyScan +from .xfefluorescencespectrum import XFEFluorescenceSpectrum class EventBase(BaseModel): id: int type: str - startTime: datetime - endTime: Optional[datetime] + startTime: Optional[datetime] = Field(title="Start Time") + endTime: Optional[datetime] = Field(title="End Time") + duration: Optional[float] = Field(title="Duration", unit="min") count: int - blSample: Optional[str] = Field(description="Sample name") - blSampleId: Optional[int] = Field(description="Sample id") + session: Optional[str] + sessionId: int + proposal: str + blSample: Optional[str] = Field(description="Sample Name") + blSampleId: Optional[int] = Field(description="Sample Id") + attachments: Optional[int] = Field(description="No. 
of attachments") - Item: Union[DataCollection, RobotAction] + Item: Union[DataCollection, RobotAction, XFEFluorescenceSpectrum, EnergyScan] class Event(EventBase): class Config: orm_mode = True - # extra = 'forbid' + + +class EventType(BaseModel): + eventTypeName: str + eventType: str diff --git a/pyispyb/core/schemas/labcontacts.py b/pyispyb/core/schemas/labcontacts.py index 277b3043..65db84f8 100644 --- a/pyispyb/core/schemas/labcontacts.py +++ b/pyispyb/core/schemas/labcontacts.py @@ -2,7 +2,7 @@ import datetime from pydantic import BaseModel, Field -from pyispyb.core.schemas.laboratories import Laboratory +from pyispyb.core.schemas.laboratories import Laboratory, LaboratoryCreate class PersonBase(BaseModel): @@ -14,6 +14,10 @@ class PersonBase(BaseModel): Laboratory: Optional[Laboratory] +class PersonCreate(PersonBase): + Laboratory: Optional[LaboratoryCreate] = Field(title="Laboratory") + + class Person(PersonBase): # personId: int @@ -41,7 +45,7 @@ class LabContactBase(BaseModel): class LabContactCreate(LabContactBase): - pass + Person: PersonCreate = Field(title="Person") class LabContact(LabContactBase): @@ -53,3 +57,4 @@ class LabContact(LabContactBase): class Config: orm_mode = True + json_encoders = {datetime.datetime: lambda obj: obj.isoformat() + "+00:00"} diff --git a/pyispyb/core/schemas/laboratories.py b/pyispyb/core/schemas/laboratories.py index 4cf598b6..a2a65ac1 100644 --- a/pyispyb/core/schemas/laboratories.py +++ b/pyispyb/core/schemas/laboratories.py @@ -1,27 +1,43 @@ import datetime from typing import Optional -from pyispyb.core.schemas.validators import WordDashSpace + +# from pyispyb.core.schemas.validators import WordDashSpace from pydantic import BaseModel, Field class LaboratoryCreate(BaseModel): - name: str = WordDashSpace( - title="Laboratory Name", description="The Laboratory name" - ) + name: str = Field(title="Laboratory Name", description="The Laboratory name") address: str = Field(title="Address", description="The Laboratory 
Address") city: str = Field(title="City", description="The Laboratory City") country: str = Field(title="Country", description="The Laboratory Country") - url: Optional[str] = Field(title="URL", description="The Laboratory optional URL") - laboratoryExtPk: Optional[int] = Field( - title="laboratoryExtPk", description="External Id from the User Portal" + url: Optional[str] = Field( + title="URL", description="The Laboratory optional URL", nullable=True ) - recordTimeStamp: Optional[datetime.datetime] = Field( - title="recordTimeStamp", description="Time Laboratory was created" + laboratoryExtPk: Optional[int] = Field( + title="laboratoryExtPk", + description="External Id from the User Portal", + nullable=True, ) class Laboratory(LaboratoryCreate): laboratoryId: int + # Could be missing in db + name: Optional[str] = Field( + None, title="Laboratory Name", description="The Laboratory name" + ) + address: Optional[str] = Field( + None, title="Address", description="The Laboratory Address" + ) + city: Optional[str] = Field(None, title="City", description="The Laboratory City") + country: Optional[str] = Field( + None, title="Country", description="The Laboratory Country" + ) + recordTimeStamp: Optional[datetime.datetime] = Field( + title="recordTimeStamp", + description="Time Laboratory was created", + ) + class Config: orm_mode = True diff --git a/pyispyb/core/schemas/proposals.py b/pyispyb/core/schemas/proposals.py new file mode 100644 index 00000000..47fb95b7 --- /dev/null +++ b/pyispyb/core/schemas/proposals.py @@ -0,0 +1,39 @@ +# import datetime +import enum +from typing import Optional + +from pydantic import BaseModel, Field + + +class ProposalState(str, enum.Enum): + Open = "Open" + Closed = "Closed" + Cancelled = "Cancelled" + + +class ProposalMetaData(BaseModel): + persons: int = Field( + description="Number of people registered on this proposal (via ProposalHasPerson)" + ) + sessions: int = Field(description="Number of sessions") + beamLines: list[str] = 
Field(description="Beamlines allocated in this proposal") + uiGroups: Optional[list[str]] = Field( + description="UI groups allocated in this proposal" + ) + + +class ProposalBase(BaseModel): + proposalCode: str = Field(title="Proposal Code") + proposalNumber: str = Field(title="Proposal Number") + proposal: str + title: Optional[str] = Field(title="Proposal Title") + state: Optional[ProposalState] = Field(title="Proposal State") + + metadata: ProposalMetaData = Field(alias="_metadata") + + +class Proposal(ProposalBase): + proposalId: int + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/protein.py b/pyispyb/core/schemas/protein.py index 1dadd840..82915712 100644 --- a/pyispyb/core/schemas/protein.py +++ b/pyispyb/core/schemas/protein.py @@ -1,42 +1,54 @@ -""" -Project: py-ispyb. +from typing import Optional -https://github.com/ispyb/py-ispyb +from pydantic import BaseModel, Field -This file is part of py-ispyb software. -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" +class ComponentType(BaseModel): + componentTypeId: int + name: str + class Config: + orm_mode = True -__license__ = "LGPLv3+" +class ConcentrationType(BaseModel): + concentrationTypeId: int + name: str + symbol: str -from pydantic import BaseModel + class Config: + orm_mode = True -from ispyb import models -p = models.Protein +class ProteinMetaData(BaseModel): + pdbs: Optional[int] = Field(description="Number of attached pdbs") + crystals: Optional[int] = Field(description="Number of child crystals") + samples: Optional[int] = Field(description="Number of child samples") + datacollections: Optional[int] = Field(description="Number of data collections") class ProteinBase(BaseModel): name: str - acronym: str + acronym: str = Field(title="Acronym", description="A short name") + proposalId: int + sequence: Optional[str] = Field( + title="Sequence/SMILES", description="Sequence or chemical composition" + ) + density: Optional[float] = Field(title="Density", unit="g/L") + molecularMass: Optional[float] = Field(title="Mass", unit="kDa") + + containmentLevel: Optional[str] + hazardGroup: Optional[str] + safetyLevel: Optional[str] + + ComponentType: Optional[ComponentType] + # ConcentrationType: Optional[ConcentrationType] class Protein(ProteinBase): proteinId: int + metadata: Optional[ProteinMetaData] = Field(alias="_metadata") + class Config: orm_mode = True diff --git a/pyispyb/core/schemas/robotactions.py b/pyispyb/core/schemas/robotactions.py new file mode 100644 index 00000000..a1abb387 --- /dev/null +++ b/pyispyb/core/schemas/robotactions.py @@ -0,0 +1,12 @@ +from typing import Optional + +from pydantic import BaseModel + + +class RobotAction(BaseModel): + actionType: str + status: Optional[str] + message: Optional[str] + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/samples.py b/pyispyb/core/schemas/samples.py index 3117471c..b3b9604a 100644 --- a/pyispyb/core/schemas/samples.py +++ b/pyispyb/core/schemas/samples.py @@ -3,29 +3,129 @@ 
from typing import Optional from pydantic import BaseModel, Field -from ispyb import models from .crystal import Crystal -s = models.BLSample + +class Position(BaseModel): + posX: int + posY: int + + class Config: + orm_mode = True class SampleMetaData(BaseModel): subsamples: int = Field(description="Number of sub samples") datacollections: int = Field(description="Number of data collections") + types: Optional[list[str]] = Field( + description="Types of data collections", nullable=True + ) + queued: Optional[bool] = Field( + description="Whether this sample is queued for data collection" + ) + strategies: Optional[int] = Field(description="Number of successful strategies") + autoIntegrations: Optional[int] = Field( + description="Number of successful auto-integrations" + ) + integratedResolution: Optional[float] = Field( + description="Highest integration resolution", nullable=True + ) + proposal: Optional[str] = Field(description="The associated proposal") class SampleBase(BaseModel): name: str comments: Optional[str] = Field(title="Comments", nullable=True) + location: Optional[int] = Field( + title="Location", description="Location in container" + ) + containerId: Optional[int] + + metadata: Optional[SampleMetaData] = Field(alias="_metadata") + + +class SampleProtein(BaseModel): + proposalId: str + name: str + acronym: str + + class Config: + orm_mode = True + - metadata: SampleMetaData = Field(alias="_metadata") +class SampleCrystal(Crystal): + Protein: SampleProtein = Field(title="Protein") + + +class SampleContainer(BaseModel): + code: str + + sampleChangerLocation: Optional[str] = Field( + title="Sample Changer Location", description="Position in sample change" + ) + beamlineLocation: Optional[str] = Field( + title="Beamline Location", description="Beamline if container is assigned" + ) + + class Config: + orm_mode = True class Sample(SampleBase): blSampleId: int - Crystal: Crystal + Crystal: SampleCrystal = Field(title="Crystal") + Container: 
Optional[SampleContainer] = Field(title="Container") + + class Config: + orm_mode = True + + +class SampleImageMetaData(BaseModel): + url: str = Field(description="Url to sample image") + + +class SampleImage(BaseModel): + blSampleImageId: int + blSampleId: int + micronsPerPixelX: float + micronsPerPixelY: float + offsetX: int + offsetY: int + + metadata: SampleImageMetaData = Field(alias="_metadata") + + class Config: + orm_mode = True + + +class SubSampleSample(BaseModel): + name: str + + class Config: + orm_mode = True + + +class SubSampleMetaData(BaseModel): + datacollections: int = Field(description="Number of data collections") + types: Optional[list[str]] = Field(description="Types of data collections") + + +class SubSampleBase(BaseModel): + type: Optional[str] = Field(title="Subsample Type") + comments: Optional[str] = Field(title="Comments", nullable=True) + blSampleId: int + + metadata: SubSampleMetaData = Field(alias="_metadata") + + +class SubSample(SubSampleBase): + blSubSampleId: int + + BLSample: SubSampleSample + Position1: Optional[Position] + Position2: Optional[Position] class Config: orm_mode = True diff --git a/pyispyb/core/schemas/sessions.py b/pyispyb/core/schemas/sessions.py new file mode 100644 index 00000000..9d809ff8 --- /dev/null +++ b/pyispyb/core/schemas/sessions.py @@ -0,0 +1,39 @@ +# import datetime +from datetime import datetime +from typing import Optional + +from pydantic import BaseModel, Field + + +class SessionMetaData(BaseModel): + datacollections: Optional[int] = Field(description="Number of datacollections") + uiGroups: Optional[list[str]] = Field(description="UI groups for this session") + persons: int = Field( + description="Number of people registered on this session (via SessionHasPerson)" + ) + active: bool = Field(description="Whether this session is active") + active_soon: bool = Field( + description="Whether this session is due to start soon or has ended recently (+/-20 min)" + ) + sessionTypes: list[str] = 
Field(description="Session types for this session") + + +class SessionBase(BaseModel): + proposalId: int + session: str + proposal: str + visit_number: Optional[int] + startDate: datetime + endDate: datetime + beamLineName: str + beamLineOperator: Optional[str] + scheduled: Optional[bool] + + +class Session(SessionBase): + sessionId: int + + metadata: SessionMetaData = Field(alias="_metadata") + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/shipping.py b/pyispyb/core/schemas/shipping.py new file mode 100644 index 00000000..18215bc9 --- /dev/null +++ b/pyispyb/core/schemas/shipping.py @@ -0,0 +1,34 @@ +from typing import Optional +from pydantic import BaseModel, Field + + +class Shipping(BaseModel): + shippingName: str = Field(title="Name") + + class Config: + orm_mode = True + + +class Dewar(BaseModel): + code: str = Field(title="Name") + + Shipping: Shipping + + class Config: + orm_mode = True + + +class Container(BaseModel): + code: str = Field(title="Name") + + sampleChangerLocation: Optional[str] = Field( + description="Position in sample change" + ) + beamlineLocation: Optional[str] = Field( + description="Beamline if container is assigned" + ) + + Dewar: Dewar + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/userportalsync.py b/pyispyb/core/schemas/userportalsync.py index 308b8f4f..d76f3cbc 100644 --- a/pyispyb/core/schemas/userportalsync.py +++ b/pyispyb/core/schemas/userportalsync.py @@ -32,7 +32,7 @@ ) -class Person(PydanticPerson): +class UPPerson(PydanticPerson): # At least login or externalId required to be able to check for existing Person in DB (to update or create) login: Optional[str] = None externalId: Optional[int] = None @@ -64,17 +64,17 @@ class PersonSessionOptions(PydanticSessionHasPerson): ] -class PersonProposalLaboratory(Person): +class PersonProposalLaboratory(UPPerson): laboratory: Optional[LaboratoryCreate] -class PersonSessionLaboratory(Person): +class PersonSessionLaboratory(UPPerson): 
laboratory: Optional[LaboratoryCreate] # Optional section to be used in Session_has_Person session_options: Optional[PersonSessionOptions] -class LabContact(PydanticLabContact): +class UPLabContact(PydanticLabContact): # Person is required for a LabContact person: PersonProposalLaboratory # Make dewarAvgCustomsValue and dewarAvgTransportValue optional fields @@ -86,7 +86,7 @@ class Config: orm_mode = True -class Proposal(PydanticProposal): +class UPProposal(PydanticProposal): # proposalCode and proposalNumber required proposalCode: str proposalNumber: str @@ -95,13 +95,13 @@ class Proposal(PydanticProposal): # Here we need minimum 1 Person to be related to the Proposal (foreign key constraint) persons: conlist(PersonProposalLaboratory, min_items=1) # LabContacts are always related to a proposal - labcontacts: Optional[List[LabContact]] + labcontacts: Optional[List[UPLabContact]] class Config: orm_mode = True -class Protein(PydanticProtein): +class UPProtein(PydanticProtein): # It may sync by checking protein acronym and proposalId in DB acronym: str # Can also use externalId to be able to check for existing protein in DB (to update or create) @@ -112,7 +112,7 @@ class Config: orm_mode = True -class Session(PydanticSession): +class UPSession(PydanticSession): # expSessionPk or externalId to be able to check for existing session in DB (to update or create) # The expSessionPk field might be deprecated later expSessionPk: Optional[int] @@ -134,9 +134,9 @@ class Config: class PydanticProposal(BaseModel): - proposal: Proposal - sessions: Optional[List[Session]] - proteins: Optional[List[Protein]] + proposal: UPProposal + sessions: Optional[List[UPSession]] + proteins: Optional[List[UPProtein]] class UserPortalProposalSync(PydanticProposal): diff --git a/pyispyb/core/schemas/utils.py b/pyispyb/core/schemas/utils.py index 46818ead..641764ea 100644 --- a/pyispyb/core/schemas/utils.py +++ b/pyispyb/core/schemas/utils.py @@ -1,4 +1,6 @@ -from pydantic import BaseModel +from 
typing import Optional + +from pydantic import BaseModel, create_model from pydantic.main import ModelMetaclass @@ -14,3 +16,38 @@ class PaginatedModel(BaseModel): PaginatedModel.__qualname__ = cls_name return PaginatedModel + + +def make_optional(baseclass: BaseModel, *, exclude: dict[str, any] = {}) -> BaseModel: + """Make a pydantic models fields optional (for patch requests) + + Optionally exclude some fields (with nesting): + ``` + exclude={ + "proposalId": True, + "Person": { + "givenName": True, + "Laboratory": {"laboratoryExtPk": True}, + }, + } + ``` + """ + # https://stackoverflow.com/questions/67699451/make-every-fields-as-optional-with-pydantic + fields = baseclass.__fields__ + + validators = {"__validators__": baseclass.__validators__} + optional_fields = { + key: (Optional[item.type_], None) + for key, item in fields.items() + if exclude.get(key, None) is not True + } + new_model = create_model( + f"{baseclass.__name__}Optional", **optional_fields, __validators__=validators + ) + + # Deal with nested models + for key, item in new_model.__fields__.items(): + if item.is_complex(): + item.type_ = make_optional(item.type_, exclude=exclude.get(key, {})) + + return new_model diff --git a/pyispyb/core/schemas/xfefluorescencespectrum.py b/pyispyb/core/schemas/xfefluorescencespectrum.py new file mode 100644 index 00000000..dee6c944 --- /dev/null +++ b/pyispyb/core/schemas/xfefluorescencespectrum.py @@ -0,0 +1,8 @@ +from pydantic import BaseModel + + +class XFEFluorescenceSpectrum(BaseModel): + xfeFluorescenceSpectrumId: int + + class Config: + orm_mode = True diff --git a/pyispyb/dependencies.py b/pyispyb/dependencies.py index 984543c4..49c6ecbe 100644 --- a/pyispyb/dependencies.py +++ b/pyispyb/dependencies.py @@ -1,7 +1,8 @@ import enum -from typing import Optional, Any +from typing import Callable, Optional, Any from fastapi import HTTPException, Query +from pydantic import conint from .app.globals import g @@ -12,22 +13,27 @@ class Order(str, enum.Enum): 
def pagination( - skip: Optional[int] = Query(0, description="Results to skip"), - limit: Optional[int] = Query(25, description="Number of results to show"), + skip: Optional[conint(ge=0)] = Query(0, description="Results to skip"), + limit: Optional[conint(gt=0)] = Query(25, description="Number of results to show"), ) -> dict[str, int]: return {"skip": skip, "limit": limit} -def order_by( - order_by: Optional[str] = Query(None, description="Field to order by"), - order: Optional[Order] = Query("asc", description="Order direction"), -) -> dict[str, Any]: - order_fields = {"order_by": order_by} +def order_by_factory(columns: dict[str], enumName: str) -> Callable: + order_by_enum = enum.Enum(enumName, {k: k for k in columns.keys()}) - if order: + def order_by( + order_by: Optional[order_by_enum] = Query( + None, description="Field to order by" + ), + order: Optional[Order] = Query(Order.asc, description="Order direction"), + ) -> dict[str, Any]: + order_fields = {"order_by": order_by} order_fields["order"] = order - return order_fields + return order_fields + + return order_by def filter(filter: str) -> str: diff --git a/pyispyb/filters.py b/pyispyb/filters.py index 02fb9393..ee3d4fff 100644 --- a/pyispyb/filters.py +++ b/pyispyb/filters.py @@ -10,6 +10,32 @@ def session( return session +def sessionId( + sessionId: Optional[int] = Query(None, description="Session id to filter by") +) -> Optional[int]: + return sessionId + + +def proposal( + proposal: Optional[str] = Query( + None, description="Proposal name to filter by", regex=r"^\w+\d+$" + ) +) -> Optional[str]: + return proposal + + +def proposalId( + proposalId: Optional[int] = Query(None, description="Proposal id to filter by") +) -> Optional[int]: + return proposalId + + +def beamLineName( + beamLineName: Optional[str] = Query(None, description="Beamline name to filter by") +) -> Optional[str]: + return beamLineName + + def dataCollectionGroupId( dataCollectionGroupId: Optional[int] = Query( None, 
description="Data collection group id to filter by" @@ -18,13 +44,51 @@ def dataCollectionGroupId( return dataCollectionGroupId +def dataCollectionId( + dataCollectionId: Optional[int] = Query( + None, description="Data collection id to filter by" + ) +) -> Optional[int]: + return dataCollectionId + + def blSampleId( blSampleId: Optional[int] = Query(None, description="Sample id to filter by") ) -> Optional[int]: return blSampleId +def blSubSampleId( + blSubSampleId: Optional[int] = Query(None, description="Sub sample id to filter by") +) -> Optional[int]: + return blSubSampleId + + def proteinId( proteinId: Optional[int] = Query(None, description="Protein id to filter by") ) -> Optional[int]: return proteinId + + +def search( + search: str = Query(None, description="Search string to filter by") +) -> Optional[str]: + return search + + +def containerId( + containerId: Optional[int] = Query(None, description="Container id to filter by") +) -> Optional[int]: + return containerId + + +def month( + month: Optional[str] = Query(None, description="Month filter by", regex=r"^\d\d?$") +) -> Optional[str]: + return month + + +def year( + year: Optional[str] = Query(None, description="Year filter by", regex=r"^\d\d\d\d$") +) -> Optional[str]: + return year diff --git a/requirements.txt b/requirements.txt index a1c8e543..ac518fbe 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -ispyb-models==1.0.2 +ispyb-models==1.0.6 fastapi pydantic[dotenv] diff --git a/tests/conftest.py b/tests/conftest.py index e1a30f90..f0891e31 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,6 +6,7 @@ from pyispyb.config import settings from pyispyb.app.main import app as _app from tests.authclient import AuthClient +from tests.core.api.utils.permissions import mock_permissions @pytest.fixture() @@ -53,3 +54,28 @@ def short_session(): yield new_token_exp_time settings.token_exp_time = old_token_exp_time + + +@pytest.fixture +def with_beamline_groups(auth_client_efgh: 
AuthClient, app: ASGIApp): + with mock_permissions(["manage_options"], app): + resp = auth_client_efgh.patch( + "/options", + payload={ + "beamLineGroups": [ + { + "groupName": "BL0x", + "uiGroup": "mx", + "permission": "bl_admin", + "beamLines": [ + {"beamLineName": "BL01"}, + {"beamLineName": "BL02"}, + ], + }, + ] + }, + ) + + assert resp.status_code == 200 + + yield diff --git a/tests/core/api/admin/test_options.py b/tests/core/api/admin/test_options.py index e845be33..2789a411 100644 --- a/tests/core/api/admin/test_options.py +++ b/tests/core/api/admin/test_options.py @@ -14,6 +14,6 @@ def test_get_options(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIA def test_post_option(auth_client_abcd: AuthClient): - response = auth_client_abcd.post("/options") + response = auth_client_abcd.patch("/options") assert response.status_code == 403 diff --git a/tests/core/api/data/datacollections.py b/tests/core/api/data/datacollections.py new file mode 100644 index 00000000..49f43e5e --- /dev/null +++ b/tests/core/api/data/datacollections.py @@ -0,0 +1,107 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + + +test_data_dc_attachments = [ + ApiTestElem( + name="List dc attachments", + input=ApiTestInput( + login="abcd", + route="/datacollections/attachments", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="List dc attachments (admin)", + input=ApiTestInput( + permissions=[ + "bl_admin", + ], + login="efgh", + route="/datacollections/attachments", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="Get dc attachments", + input=ApiTestInput( + login="abcd", + route="/datacollections/attachments/1", + ), + expected=ApiTestExpected( + code=404, + ), + ), +] + +test_dc_images = [ + ApiTestElem( + name="Get datacollection image", + input=ApiTestInput( + login="abcd", + route="/datacollections/images/1", + ), + expected=ApiTestExpected( + code=404, + ), + ), + ApiTestElem( + 
name="Get datacollection image (admin)", + input=ApiTestInput( + permissions=[ + "bl_admin", + ], + login="efgh", + route="/datacollections/images/1", + ), + expected=ApiTestExpected( + code=404, + ), + ), + ApiTestElem( + name="Get datacollection diffraction image", + input=ApiTestInput( + login="abcd", + route="/datacollections/images/diffraction/1", + ), + expected=ApiTestExpected( + code=404, + ), + ), + ApiTestElem( + name="Get datacollection image quality image", + input=ApiTestInput( + login="abcd", + route="/datacollections/images/quality/1", + ), + expected=ApiTestExpected( + code=404, + ), + ), +] + +test_workflows = [ + ApiTestElem( + name="Get workflow steps", + input=ApiTestInput( + login="abcd", + route="/datacollections/workflows/steps?workflowStepId=1", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="Get workflow step attachment", + input=ApiTestInput( + login="abcd", + route="/datacollections/workflows/steps/1?attachmentType=imageResultFilePath", + ), + expected=ApiTestExpected( + code=404, + ), + ), +] diff --git a/tests/core/api/data/events.py b/tests/core/api/data/events.py index 4b425ccf..bd671dcf 100644 --- a/tests/core/api/data/events.py +++ b/tests/core/api/data/events.py @@ -13,4 +13,15 @@ code=200, ), ), + ApiTestElem( + name="get event types", + input=ApiTestInput( + permissions=[], + login="abcd", + route="/events/types", + ), + expected=ApiTestExpected( + code=200, + ), + ), ] diff --git a/tests/core/api/data/authorization.py b/tests/core/api/data/legacy/authorization.py similarity index 84% rename from tests/core/api/data/authorization.py rename to tests/core/api/data/legacy/authorization.py index 9809d3a9..bf64d620 100644 --- a/tests/core/api/data/authorization.py +++ b/tests/core/api/data/legacy/authorization.py @@ -8,7 +8,7 @@ "all_sessions", ], login="efgh", - route="/em/session/70566/stats", + route="/legacy/em/session/70566/stats", ), expected=ApiTestExpected(code=200), ), @@ -17,7 +17,7 @@ 
input=ApiTestInput( permissions=[], login="efgh", - route="/em/session/70566/stats", + route="/legacy/em/session/70566/stats", ), expected=ApiTestExpected(code=403), ), @@ -28,7 +28,7 @@ "own_sessions", ], login="pasteur", - route="/em/session/70566/stats", + route="/legacy/em/session/70566/stats", ), expected=ApiTestExpected(code=200), ), @@ -37,7 +37,7 @@ input=ApiTestInput( permissions=[], login="abcd", - route="/em/session/70566/stats", + route="/legacy/em/session/70566/stats", ), expected=ApiTestExpected(code=403), ), @@ -48,7 +48,7 @@ "own_sessions", ], login="darwin", - route="/em/session/70565/stats", + route="/legacy/em/session/70565/stats", ), expected=ApiTestExpected(code=200), ), @@ -57,7 +57,7 @@ input=ApiTestInput( permissions=[], login="darwin", - route="/em/session/70565/stats", + route="/legacy/em/session/70565/stats", ), expected=ApiTestExpected(code=403), ), @@ -68,7 +68,7 @@ "own_sessions", ], login="abcd", - route="/em/session/70566/stats", + route="/legacy/em/session/70566/stats", ), expected=ApiTestExpected(code=403), ), @@ -82,7 +82,7 @@ "all_proposals", ], login="efgh", - route="/proposals/MX1", + route="/legacy/proposals/MX1", ), expected=ApiTestExpected(code=200), ), @@ -91,7 +91,7 @@ input=ApiTestInput( permissions=[], login="efgh", - route="/proposals/MX1", + route="/legacy/proposals/MX1", ), expected=ApiTestExpected(code=403), ), @@ -102,7 +102,7 @@ "own_proposals", ], login="pasteur", - route="/proposals/MX1", + route="/legacy/proposals/MX1", ), expected=ApiTestExpected(code=200), ), @@ -111,7 +111,7 @@ input=ApiTestInput( permissions=[], login="pasteur", - route="/proposals/MX1", + route="/legacy/proposals/MX1", ), expected=ApiTestExpected(code=403), ), @@ -122,7 +122,7 @@ "own_proposals", ], login="abcd", - route="/proposals/MX1", + route="/legacy/proposals/MX1", ), expected=ApiTestExpected(code=403), ), diff --git a/tests/core/api/data/legacy/proposals.py b/tests/core/api/data/legacy/proposals.py new file mode 100644 index 
00000000..76d59320 --- /dev/null +++ b/tests/core/api/data/legacy/proposals.py @@ -0,0 +1,134 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + +test_data_proposal_list = [ + ApiTestElem( + name="list own_proposals", + input=ApiTestInput( + permissions=[ + "own_proposals", + ], + login="pasteur", + route="/legacy/proposals", + ), + expected=ApiTestExpected( + code=200, + res=[ + { + "Proposal_proposalId": 9096, + "Proposal_proposalType": "MX", + "Proposal_personId": 404290, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_proposalNumber": "1", + } + ], + ), + ), + ApiTestElem( + name="empty list own_proposals", + input=ApiTestInput( + permissions=[ + "own_proposals", + ], + login="efgh", + route="/legacy/proposals", + ), + expected=ApiTestExpected(code=200, res=[]), + ), + ApiTestElem( + name="list all_proposals", + input=ApiTestInput( + permissions=[ + "all_proposals", + ], + login="pasteur", + route="/legacy/proposals", + ), + expected=ApiTestExpected(code=200), + ), +] + +test_data_proposal_info = [ + ApiTestElem( + name="own_proposals OK proposal name", + input=ApiTestInput( + permissions=[ + "own_proposals", + ], + login="pasteur", + route="/legacy/proposals/MX1", + ), + expected=ApiTestExpected( + code=200, + res={ + "proposal": { + "proposalId": 9096, + "proposalCode": "MX", + "proposalType": "MX", + "externalId": None, + "personId": 404290, + "title": "TEST", + "proposalNumber": "1", + "bltimeStamp": "2022-05-10T07:59:31", + "state": "Open", + } + }, + ), + ), + ApiTestElem( + name="own_proposals OK proposal id", + input=ApiTestInput( + permissions=[ + "own_proposals", + ], + login="pasteur", + route="/legacy/proposals/9096", + ), + expected=ApiTestExpected( + code=200, + res={ + "proposal": { + "proposalId": 9096, + "proposalCode": "MX", + "proposalType": "MX", + "externalId": None, + "personId": 404290, + "title": "TEST", + "proposalNumber": "1", + "bltimeStamp": "2022-05-10T07:59:31", + 
"state": "Open", + } + }, + ), + ), + ApiTestElem( + name="own_proposals NOK", + input=ApiTestInput( + permissions=[ + "own_proposals", + ], + login="efgh", + route="/legacy/proposals/9096", + ), + expected=ApiTestExpected( + code=403, + res={ + "detail": "User efgh (permissions assigned: ['own_proposals']) is not authorized to access proposal 9096." + }, + ), + ), + ApiTestElem( + name="invalid proposal", + input=ApiTestInput( + permissions=[ + "all_proposals", + ], + login="pasteur", + route="/legacy/proposals/NOT_A_VALID_PROPOSAL", + ), + expected=ApiTestExpected( + code=200, + res={"proposal": None}, + ), + ), +] diff --git a/tests/core/api/data/legacy/sessions.py b/tests/core/api/data/legacy/sessions.py new file mode 100644 index 00000000..cd944e7b --- /dev/null +++ b/tests/core/api/data/legacy/sessions.py @@ -0,0 +1,798 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + + +test_data_session_list = [ + ApiTestElem( + name="list own_sessions", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="pasteur", + route="/legacy/sessions", + ), + expected=ApiTestExpected( + code=200, + res=[ + { + "sessionId": 70565, + "expSessionPk": 78889, + "beamLineSetupId": 1761425, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-11-22T09:30:00", + "BLSession_endDate": "2016-11-23T17:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 1, + "comments": None, + "beamLineOperator": "DARWIN C", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "91481", + "BLSession_lastUpdate": "2016-11-23T17:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + 
"Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70566, + "expSessionPk": 78888, + "beamLineSetupId": 1761426, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-11-23T09:30:00", + "BLSession_endDate": "2016-11-23T17:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 1, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2016-11-23T17:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70567, + "expSessionPk": 56630, + "beamLineSetupId": 1761427, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-07-22T09:30:00", + "BLSession_endDate": "2016-07-23T08:00:00", + 
"beamLineName": "BL02", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "DARWIN C", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "91481", + "BLSession_lastUpdate": "2016-07-23T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70568, + "expSessionPk": 79910, + "beamLineSetupId": 1761428, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2017-05-12T09:30:00", + "BLSession_endDate": "2017-05-13T08:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2017-05-13T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + 
"Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + ], + ), + ), + ApiTestElem( + name="empty list own_sessions", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="efgh", + route="/legacy/sessions", + ), + expected=ApiTestExpected(code=200, res=[]), + ), + ApiTestElem( + name="list all_sessions", + input=ApiTestInput( + permissions=[ + "all_sessions", + ], + login="pasteur", + route="/legacy/sessions", + ), + expected=ApiTestExpected(code=200), + ), +] + + +test_data_session_dates_list = [ + ApiTestElem( + name="list own_sessions", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="pasteur", + route="/legacy/sessions/date/20170512/20170513", + ), + expected=ApiTestExpected( + code=200, + res=[ + { + "sessionId": 70568, + "expSessionPk": 79910, + "beamLineSetupId": 1761428, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2017-05-12T09:30:00", + "BLSession_endDate": "2017-05-13T08:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2017-05-13T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + 
"Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + ], + ), + ), + ApiTestElem( + name="empty list own_sessions", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="pasteur", + route="/legacy/sessions/date/20000101/20000102", + ), + expected=ApiTestExpected(code=200, res=[]), + ), + ApiTestElem( + name="list all_sessions", + input=ApiTestInput( + permissions=[ + "all_sessions", + ], + login="efgh", + route="/legacy/sessions/date/20170512/20170513", + ), + expected=ApiTestExpected( + code=200, + res=[ + { + "sessionId": 70568, + "expSessionPk": 79910, + "beamLineSetupId": 1761428, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2017-05-12T09:30:00", + "BLSession_endDate": "2017-05-13T08:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2017-05-13T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + 
"xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + ], + ), + ), +] + + +test_data_session_proposal_list = [ + ApiTestElem( + name="list own_sessions name", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="pasteur", + route="/legacy/sessions/proposal/MX1", + ), + expected=ApiTestExpected( + code=200, + res=[ + { + "sessionId": 70565, + "expSessionPk": 78889, + "beamLineSetupId": 1761425, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-11-22T09:30:00", + "BLSession_endDate": "2016-11-23T17:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 1, + "comments": None, + "beamLineOperator": "DARWIN C", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "91481", + "BLSession_lastUpdate": "2016-11-23T17:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70566, + "expSessionPk": 78888, + "beamLineSetupId": 1761426, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-11-23T09:30:00", + "BLSession_endDate": "2016-11-23T17:00:00", + 
"beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 1, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2016-11-23T17:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70567, + "expSessionPk": 56630, + "beamLineSetupId": 1761427, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-07-22T09:30:00", + "BLSession_endDate": "2016-07-23T08:00:00", + "beamLineName": "BL02", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "DARWIN C", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "91481", + "BLSession_lastUpdate": "2016-07-23T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + 
"Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70568, + "expSessionPk": 79910, + "beamLineSetupId": 1761428, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2017-05-12T09:30:00", + "BLSession_endDate": "2017-05-13T08:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2017-05-13T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + ], + ), + ), + ApiTestElem( + name="list own_sessions id", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="pasteur", + route="/legacy/sessions/proposal/9096", + ), + expected=ApiTestExpected( + code=200, + res=[ + { + "sessionId": 70565, + "expSessionPk": 78889, + 
"beamLineSetupId": 1761425, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-11-22T09:30:00", + "BLSession_endDate": "2016-11-23T17:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 1, + "comments": None, + "beamLineOperator": "DARWIN C", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "91481", + "BLSession_lastUpdate": "2016-11-23T17:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70566, + "expSessionPk": 78888, + "beamLineSetupId": 1761426, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-11-23T09:30:00", + "BLSession_endDate": "2016-11-23T17:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 1, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2016-11-23T17:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + 
"Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70567, + "expSessionPk": 56630, + "beamLineSetupId": 1761427, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-07-22T09:30:00", + "BLSession_endDate": "2016-07-23T08:00:00", + "beamLineName": "BL02", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "DARWIN C", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "91481", + "BLSession_lastUpdate": "2016-07-23T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70568, + "expSessionPk": 79910, + "beamLineSetupId": 1761428, + "proposalId": 9096, + "projectCode": None, + 
"BLSession_startDate": "2017-05-12T09:30:00", + "BLSession_endDate": "2017-05-13T08:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2017-05-13T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + ], + ), + ), + ApiTestElem( + name="list own_sessions empty", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="efgh", + route="/legacy/sessions/proposal/MX1", + ), + expected=ApiTestExpected(code=200, res=[]), + ), + ApiTestElem( + name="no rights", + input=ApiTestInput( + permissions=[], + login="pasteur", + route="/legacy/sessions/proposal/MX1", + ), + expected=ApiTestExpected( + code=401, + res={ + "detail": "User pasteur (permissions assigned: []) has no appropriate permission (any: ['own_sessions', 'all_sessions']) to execute method." 
+ }, + ), + ), + ApiTestElem( + name="list proposal does not exist", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="pasteur", + route="/legacy/sessions/proposal/UNKN", + ), + expected=ApiTestExpected(code=200, res=[]), + ), + ApiTestElem( + name="list all_sessions", + input=ApiTestInput( + permissions=[ + "all_sessions", + ], + login="pasteur", + route="/legacy/sessions/proposal/MX1", + ), + expected=ApiTestExpected(code=200), + ), +] diff --git a/tests/core/api/data/proposals.py b/tests/core/api/data/proposals.py index 960e7f7d..eee46969 100644 --- a/tests/core/api/data/proposals.py +++ b/tests/core/api/data/proposals.py @@ -1,134 +1,64 @@ from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + test_data_proposal_list = [ ApiTestElem( - name="list own_proposals", + name="List proposals", input=ApiTestInput( - permissions=[ - "own_proposals", - ], - login="pasteur", + login="abcd", route="/proposals", ), expected=ApiTestExpected( code=200, - res=[ - { - "Proposal_proposalId": 9096, - "Proposal_proposalType": "MX", - "Proposal_personId": 404290, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_proposalNumber": "1", - } - ], - ), - ), - ApiTestElem( - name="empty list own_proposals", - input=ApiTestInput( - permissions=[ - "own_proposals", - ], - login="efgh", - route="/proposals", - ), - expected=ApiTestExpected(code=200, res=[]), - ), - ApiTestElem( - name="list all_proposals", - input=ApiTestInput( - permissions=[ - "all_proposals", - ], - login="pasteur", - route="/proposals", ), - expected=ApiTestExpected(code=200), ), -] - -test_data_proposal_info = [ ApiTestElem( - name="own_proposals OK proposal name", + name="abcd / MX1 / 404", input=ApiTestInput( - permissions=[ - "own_proposals", - ], - login="pasteur", + login="abcd", route="/proposals/MX1", ), expected=ApiTestExpected( - code=200, - res={ - "proposal": { - "proposalId": 9096, - "proposalCode": "MX", - "proposalType": "MX", - 
"externalId": None, - "personId": 404290, - "title": "TEST", - "proposalNumber": "1", - "bltimeStamp": "2022-05-10T07:59:31", - "state": "Open", - } - }, + code=404, ), ), ApiTestElem( - name="own_proposals OK proposal id", + name="List proposals (admin)", input=ApiTestInput( permissions=[ - "own_proposals", + "bl_admin", ], - login="pasteur", - route="/proposals/9096", + login="efgh", + route="/proposals", ), expected=ApiTestExpected( code=200, - res={ - "proposal": { - "proposalId": 9096, - "proposalCode": "MX", - "proposalType": "MX", - "externalId": None, - "personId": 404290, - "title": "TEST", - "proposalNumber": "1", - "bltimeStamp": "2022-05-10T07:59:31", - "state": "Open", - } - }, ), ), ApiTestElem( - name="own_proposals NOK", + name="efgh / MX1 / 200", input=ApiTestInput( + login="efgh", permissions=[ - "own_proposals", + "bl_admin", ], - login="efgh", - route="/proposals/9096", + route="/proposals/MX1", ), expected=ApiTestExpected( - code=403, - res={ - "detail": "User efgh (permissions assigned: ['own_proposals']) is not authorized to access proposal 9096." 
- }, + code=200, ), ), ApiTestElem( - name="invalid proposal", + name="efgh / blc00001 / 404", input=ApiTestInput( + login="efgh", permissions=[ - "all_proposals", + "bl_admin", ], - login="pasteur", - route="/proposals/NOT_A_VALID_PROPOSAL", + route="/proposals/blc00001", ), expected=ApiTestExpected( - code=200, - res={"proposal": None}, + code=404, ), ), ] diff --git a/tests/core/api/data/proteins.py b/tests/core/api/data/proteins.py new file mode 100644 index 00000000..de4df576 --- /dev/null +++ b/tests/core/api/data/proteins.py @@ -0,0 +1,38 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + + +test_data_proteins_list = [ + ApiTestElem( + name="List proteins", + input=ApiTestInput( + login="abcd", + route="/proteins", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="Get a protein", + input=ApiTestInput( + login="abcd", + route="/proteins/1", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="List proteins (admin)", + input=ApiTestInput( + permissions=[ + "bl_admin", + ], + login="efgh", + route="/proteins", + ), + expected=ApiTestExpected( + code=200, + ), + ), +] diff --git a/tests/core/api/data/samples.py b/tests/core/api/data/samples.py new file mode 100644 index 00000000..fa6168f7 --- /dev/null +++ b/tests/core/api/data/samples.py @@ -0,0 +1,111 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + + +test_data_samples_list = [ + ApiTestElem( + name="List samples", + input=ApiTestInput( + login="abcd", + route="/samples", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="Get a samples", + input=ApiTestInput( + login="abcd", + route="/samples/1", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="List samples (admin)", + input=ApiTestInput( + permissions=[ + "bl_admin", + ], + login="efgh", + route="/samples", + ), + expected=ApiTestExpected( + code=200, + ), + ), +] + 
+test_data_subsamples_list = [ + ApiTestElem( + name="List sub samples", + input=ApiTestInput( + login="abcd", + route="/samples/sub", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="Get a sub sample", + input=ApiTestInput( + login="abcd", + route="/samples/sub/2", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="List sub samples (admin)", + input=ApiTestInput( + permissions=[ + "bl_admin", + ], + login="efgh", + route="/samples/sub", + ), + expected=ApiTestExpected( + code=200, + ), + ), +] + + +test_data_sampleimages_list = [ + ApiTestElem( + name="List sample images", + input=ApiTestInput( + login="abcd", + route="/samples/images", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="Get sample image", + input=ApiTestInput( + login="abcd", + route="/samples/images/1", + ), + expected=ApiTestExpected( + code=404, + ), + ), + ApiTestElem( + name="List sample images (admin)", + input=ApiTestInput( + permissions=[ + "bl_admin", + ], + login="efgh", + route="/samples/images", + ), + expected=ApiTestExpected( + code=200, + ), + ), +] diff --git a/tests/core/api/data/sessions.py b/tests/core/api/data/sessions.py index 32a2ef6c..90bd56e0 100644 --- a/tests/core/api/data/sessions.py +++ b/tests/core/api/data/sessions.py @@ -1,798 +1,51 @@ from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput -test_data_session_list = [ +test_data_sessions_list = [ ApiTestElem( - name="list own_sessions", + name="List sessions", input=ApiTestInput( - permissions=[ - "own_sessions", - ], - login="pasteur", + login="abcd", route="/sessions", ), expected=ApiTestExpected( code=200, - res=[ - { - "sessionId": 70565, - "expSessionPk": 78889, - "beamLineSetupId": 1761425, - "proposalId": 9096, - "projectCode": None, - "BLSession_startDate": "2016-11-22T09:30:00", - "BLSession_endDate": "2016-11-23T17:00:00", - "beamLineName": "BL01", - "scheduled": 1, - "nbShifts": 1, - "comments": 
None, - "beamLineOperator": "DARWIN C", - "visit_number": None, - "bltimeStamp": "2022-05-10T07:59:32", - "usedFlag": None, - "sessionTitle": None, - "structureDeterminations": None, - "dewarTransport": None, - "databackupFrance": None, - "databackupEurope": None, - "operatorSiteNumber": "91481", - "BLSession_lastUpdate": "2016-11-23T17:00:00", - "BLSession_protectedData": None, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_ProposalNumber": "1", - "Proposal_ProposalType": "MX", - "Person_personId": 404290, - "Person_familyName": "PASTEUR", - "Person_givenName": "Louis", - "Person_emailAddress": "test@test.test", - "energyScanCount": 0, - "sampleCount": 0, - "imagesCount": None, - "testDataCollectionGroupCount": 0, - "dataCollectionGroupCount": 0, - "EMdataCollectionGroupCount": 0, - "xrfSpectrumCount": 0, - "hplcCount": 0, - "sampleChangerCount": 0, - "calibrationCount": 0, - "lastExperimentDataCollectionGroup": None, - "lastEndTimeDataCollectionGroup": None, - }, - { - "sessionId": 70566, - "expSessionPk": 78888, - "beamLineSetupId": 1761426, - "proposalId": 9096, - "projectCode": None, - "BLSession_startDate": "2016-11-23T09:30:00", - "BLSession_endDate": "2016-11-23T17:00:00", - "beamLineName": "BL01", - "scheduled": 1, - "nbShifts": 1, - "comments": None, - "beamLineOperator": "PASTEUR L", - "visit_number": None, - "bltimeStamp": "2022-05-10T07:59:32", - "usedFlag": None, - "sessionTitle": None, - "structureDeterminations": None, - "dewarTransport": None, - "databackupFrance": None, - "databackupEurope": None, - "operatorSiteNumber": "17074", - "BLSession_lastUpdate": "2016-11-23T17:00:00", - "BLSession_protectedData": None, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_ProposalNumber": "1", - "Proposal_ProposalType": "MX", - "Person_personId": 404290, - "Person_familyName": "PASTEUR", - "Person_givenName": "Louis", - "Person_emailAddress": "test@test.test", - "energyScanCount": 0, - "sampleCount": 0, - 
"imagesCount": None, - "testDataCollectionGroupCount": 0, - "dataCollectionGroupCount": 0, - "EMdataCollectionGroupCount": 0, - "xrfSpectrumCount": 0, - "hplcCount": 0, - "sampleChangerCount": 0, - "calibrationCount": 0, - "lastExperimentDataCollectionGroup": None, - "lastEndTimeDataCollectionGroup": None, - }, - { - "sessionId": 70567, - "expSessionPk": 56630, - "beamLineSetupId": 1761427, - "proposalId": 9096, - "projectCode": None, - "BLSession_startDate": "2016-07-22T09:30:00", - "BLSession_endDate": "2016-07-23T08:00:00", - "beamLineName": "BL02", - "scheduled": 1, - "nbShifts": 3, - "comments": None, - "beamLineOperator": "DARWIN C", - "visit_number": None, - "bltimeStamp": "2022-05-10T07:59:32", - "usedFlag": None, - "sessionTitle": None, - "structureDeterminations": None, - "dewarTransport": None, - "databackupFrance": None, - "databackupEurope": None, - "operatorSiteNumber": "91481", - "BLSession_lastUpdate": "2016-07-23T08:00:00", - "BLSession_protectedData": None, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_ProposalNumber": "1", - "Proposal_ProposalType": "MX", - "Person_personId": 404290, - "Person_familyName": "PASTEUR", - "Person_givenName": "Louis", - "Person_emailAddress": "test@test.test", - "energyScanCount": 0, - "sampleCount": 0, - "imagesCount": None, - "testDataCollectionGroupCount": 0, - "dataCollectionGroupCount": 0, - "EMdataCollectionGroupCount": 0, - "xrfSpectrumCount": 0, - "hplcCount": 0, - "sampleChangerCount": 0, - "calibrationCount": 0, - "lastExperimentDataCollectionGroup": None, - "lastEndTimeDataCollectionGroup": None, - }, - { - "sessionId": 70568, - "expSessionPk": 79910, - "beamLineSetupId": 1761428, - "proposalId": 9096, - "projectCode": None, - "BLSession_startDate": "2017-05-12T09:30:00", - "BLSession_endDate": "2017-05-13T08:00:00", - "beamLineName": "BL01", - "scheduled": 1, - "nbShifts": 3, - "comments": None, - "beamLineOperator": "PASTEUR L", - "visit_number": None, - "bltimeStamp": 
"2022-05-10T07:59:32", - "usedFlag": None, - "sessionTitle": None, - "structureDeterminations": None, - "dewarTransport": None, - "databackupFrance": None, - "databackupEurope": None, - "operatorSiteNumber": "17074", - "BLSession_lastUpdate": "2017-05-13T08:00:00", - "BLSession_protectedData": None, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_ProposalNumber": "1", - "Proposal_ProposalType": "MX", - "Person_personId": 404290, - "Person_familyName": "PASTEUR", - "Person_givenName": "Louis", - "Person_emailAddress": "test@test.test", - "energyScanCount": 0, - "sampleCount": 0, - "imagesCount": None, - "testDataCollectionGroupCount": 0, - "dataCollectionGroupCount": 0, - "EMdataCollectionGroupCount": 0, - "xrfSpectrumCount": 0, - "hplcCount": 0, - "sampleChangerCount": 0, - "calibrationCount": 0, - "lastExperimentDataCollectionGroup": None, - "lastEndTimeDataCollectionGroup": None, - }, - ], ), ), ApiTestElem( - name="empty list own_sessions", + name="abcd / blc00001-1 / 200", input=ApiTestInput( - permissions=[ - "own_sessions", - ], - login="efgh", - route="/sessions", - ), - expected=ApiTestExpected(code=200, res=[]), - ), - ApiTestElem( - name="list all_sessions", - input=ApiTestInput( - permissions=[ - "all_sessions", - ], - login="pasteur", - route="/sessions", - ), - expected=ApiTestExpected(code=200), - ), -] - - -test_data_session_dates_list = [ - ApiTestElem( - name="list own_sessions", - input=ApiTestInput( - permissions=[ - "own_sessions", - ], - login="pasteur", - route="/sessions/date/20170512/20170513", + login="abcd", + route="/sessions/1", ), expected=ApiTestExpected( code=200, - res=[ - { - "sessionId": 70568, - "expSessionPk": 79910, - "beamLineSetupId": 1761428, - "proposalId": 9096, - "projectCode": None, - "BLSession_startDate": "2017-05-12T09:30:00", - "BLSession_endDate": "2017-05-13T08:00:00", - "beamLineName": "BL01", - "scheduled": 1, - "nbShifts": 3, - "comments": None, - "beamLineOperator": "PASTEUR L", - 
"visit_number": None, - "bltimeStamp": "2022-05-10T07:59:32", - "usedFlag": None, - "sessionTitle": None, - "structureDeterminations": None, - "dewarTransport": None, - "databackupFrance": None, - "databackupEurope": None, - "operatorSiteNumber": "17074", - "BLSession_lastUpdate": "2017-05-13T08:00:00", - "BLSession_protectedData": None, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_ProposalNumber": "1", - "Proposal_ProposalType": "MX", - "Person_personId": 404290, - "Person_familyName": "PASTEUR", - "Person_givenName": "Louis", - "Person_emailAddress": "test@test.test", - "energyScanCount": 0, - "sampleCount": 0, - "imagesCount": None, - "testDataCollectionGroupCount": 0, - "dataCollectionGroupCount": 0, - "EMdataCollectionGroupCount": 0, - "xrfSpectrumCount": 0, - "hplcCount": 0, - "sampleChangerCount": 0, - "calibrationCount": 0, - "lastExperimentDataCollectionGroup": None, - "lastEndTimeDataCollectionGroup": None, - }, - ], - ), - ), - ApiTestElem( - name="empty list own_sessions", - input=ApiTestInput( - permissions=[ - "own_sessions", - ], - login="pasteur", - route="/sessions/date/20000101/20000102", ), - expected=ApiTestExpected(code=200, res=[]), ), ApiTestElem( - name="list all_sessions", + name="List sessions (admin)", input=ApiTestInput( permissions=[ - "all_sessions", + "bl_admin", ], login="efgh", - route="/sessions/date/20170512/20170513", - ), - expected=ApiTestExpected( - code=200, - res=[ - { - "sessionId": 70568, - "expSessionPk": 79910, - "beamLineSetupId": 1761428, - "proposalId": 9096, - "projectCode": None, - "BLSession_startDate": "2017-05-12T09:30:00", - "BLSession_endDate": "2017-05-13T08:00:00", - "beamLineName": "BL01", - "scheduled": 1, - "nbShifts": 3, - "comments": None, - "beamLineOperator": "PASTEUR L", - "visit_number": None, - "bltimeStamp": "2022-05-10T07:59:32", - "usedFlag": None, - "sessionTitle": None, - "structureDeterminations": None, - "dewarTransport": None, - "databackupFrance": None, - 
"databackupEurope": None, - "operatorSiteNumber": "17074", - "BLSession_lastUpdate": "2017-05-13T08:00:00", - "BLSession_protectedData": None, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_ProposalNumber": "1", - "Proposal_ProposalType": "MX", - "Person_personId": 404290, - "Person_familyName": "PASTEUR", - "Person_givenName": "Louis", - "Person_emailAddress": "test@test.test", - "energyScanCount": 0, - "sampleCount": 0, - "imagesCount": None, - "testDataCollectionGroupCount": 0, - "dataCollectionGroupCount": 0, - "EMdataCollectionGroupCount": 0, - "xrfSpectrumCount": 0, - "hplcCount": 0, - "sampleChangerCount": 0, - "calibrationCount": 0, - "lastExperimentDataCollectionGroup": None, - "lastEndTimeDataCollectionGroup": None, - }, - ], - ), - ), -] - - -test_data_session_proposal_list = [ - ApiTestElem( - name="list own_sessions name", - input=ApiTestInput( - permissions=[ - "own_sessions", - ], - login="pasteur", - route="/sessions/proposal/MX1", - ), - expected=ApiTestExpected( - code=200, - res=[ - { - "sessionId": 70565, - "expSessionPk": 78889, - "beamLineSetupId": 1761425, - "proposalId": 9096, - "projectCode": None, - "BLSession_startDate": "2016-11-22T09:30:00", - "BLSession_endDate": "2016-11-23T17:00:00", - "beamLineName": "BL01", - "scheduled": 1, - "nbShifts": 1, - "comments": None, - "beamLineOperator": "DARWIN C", - "visit_number": None, - "bltimeStamp": "2022-05-10T07:59:32", - "usedFlag": None, - "sessionTitle": None, - "structureDeterminations": None, - "dewarTransport": None, - "databackupFrance": None, - "databackupEurope": None, - "operatorSiteNumber": "91481", - "BLSession_lastUpdate": "2016-11-23T17:00:00", - "BLSession_protectedData": None, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_ProposalNumber": "1", - "Proposal_ProposalType": "MX", - "Person_personId": 404290, - "Person_familyName": "PASTEUR", - "Person_givenName": "Louis", - "Person_emailAddress": "test@test.test", - 
"energyScanCount": 0, - "sampleCount": 0, - "imagesCount": None, - "testDataCollectionGroupCount": 0, - "dataCollectionGroupCount": 0, - "EMdataCollectionGroupCount": 0, - "xrfSpectrumCount": 0, - "hplcCount": 0, - "sampleChangerCount": 0, - "calibrationCount": 0, - "lastExperimentDataCollectionGroup": None, - "lastEndTimeDataCollectionGroup": None, - }, - { - "sessionId": 70566, - "expSessionPk": 78888, - "beamLineSetupId": 1761426, - "proposalId": 9096, - "projectCode": None, - "BLSession_startDate": "2016-11-23T09:30:00", - "BLSession_endDate": "2016-11-23T17:00:00", - "beamLineName": "BL01", - "scheduled": 1, - "nbShifts": 1, - "comments": None, - "beamLineOperator": "PASTEUR L", - "visit_number": None, - "bltimeStamp": "2022-05-10T07:59:32", - "usedFlag": None, - "sessionTitle": None, - "structureDeterminations": None, - "dewarTransport": None, - "databackupFrance": None, - "databackupEurope": None, - "operatorSiteNumber": "17074", - "BLSession_lastUpdate": "2016-11-23T17:00:00", - "BLSession_protectedData": None, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_ProposalNumber": "1", - "Proposal_ProposalType": "MX", - "Person_personId": 404290, - "Person_familyName": "PASTEUR", - "Person_givenName": "Louis", - "Person_emailAddress": "test@test.test", - "energyScanCount": 0, - "sampleCount": 0, - "imagesCount": None, - "testDataCollectionGroupCount": 0, - "dataCollectionGroupCount": 0, - "EMdataCollectionGroupCount": 0, - "xrfSpectrumCount": 0, - "hplcCount": 0, - "sampleChangerCount": 0, - "calibrationCount": 0, - "lastExperimentDataCollectionGroup": None, - "lastEndTimeDataCollectionGroup": None, - }, - { - "sessionId": 70567, - "expSessionPk": 56630, - "beamLineSetupId": 1761427, - "proposalId": 9096, - "projectCode": None, - "BLSession_startDate": "2016-07-22T09:30:00", - "BLSession_endDate": "2016-07-23T08:00:00", - "beamLineName": "BL02", - "scheduled": 1, - "nbShifts": 3, - "comments": None, - "beamLineOperator": "DARWIN C", - 
"visit_number": None, - "bltimeStamp": "2022-05-10T07:59:32", - "usedFlag": None, - "sessionTitle": None, - "structureDeterminations": None, - "dewarTransport": None, - "databackupFrance": None, - "databackupEurope": None, - "operatorSiteNumber": "91481", - "BLSession_lastUpdate": "2016-07-23T08:00:00", - "BLSession_protectedData": None, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_ProposalNumber": "1", - "Proposal_ProposalType": "MX", - "Person_personId": 404290, - "Person_familyName": "PASTEUR", - "Person_givenName": "Louis", - "Person_emailAddress": "test@test.test", - "energyScanCount": 0, - "sampleCount": 0, - "imagesCount": None, - "testDataCollectionGroupCount": 0, - "dataCollectionGroupCount": 0, - "EMdataCollectionGroupCount": 0, - "xrfSpectrumCount": 0, - "hplcCount": 0, - "sampleChangerCount": 0, - "calibrationCount": 0, - "lastExperimentDataCollectionGroup": None, - "lastEndTimeDataCollectionGroup": None, - }, - { - "sessionId": 70568, - "expSessionPk": 79910, - "beamLineSetupId": 1761428, - "proposalId": 9096, - "projectCode": None, - "BLSession_startDate": "2017-05-12T09:30:00", - "BLSession_endDate": "2017-05-13T08:00:00", - "beamLineName": "BL01", - "scheduled": 1, - "nbShifts": 3, - "comments": None, - "beamLineOperator": "PASTEUR L", - "visit_number": None, - "bltimeStamp": "2022-05-10T07:59:32", - "usedFlag": None, - "sessionTitle": None, - "structureDeterminations": None, - "dewarTransport": None, - "databackupFrance": None, - "databackupEurope": None, - "operatorSiteNumber": "17074", - "BLSession_lastUpdate": "2017-05-13T08:00:00", - "BLSession_protectedData": None, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_ProposalNumber": "1", - "Proposal_ProposalType": "MX", - "Person_personId": 404290, - "Person_familyName": "PASTEUR", - "Person_givenName": "Louis", - "Person_emailAddress": "test@test.test", - "energyScanCount": 0, - "sampleCount": 0, - "imagesCount": None, - 
"testDataCollectionGroupCount": 0, - "dataCollectionGroupCount": 0, - "EMdataCollectionGroupCount": 0, - "xrfSpectrumCount": 0, - "hplcCount": 0, - "sampleChangerCount": 0, - "calibrationCount": 0, - "lastExperimentDataCollectionGroup": None, - "lastEndTimeDataCollectionGroup": None, - }, - ], - ), - ), - ApiTestElem( - name="list own_sessions id", - input=ApiTestInput( - permissions=[ - "own_sessions", - ], - login="pasteur", - route="/sessions/proposal/9096", + route="/sessions", ), expected=ApiTestExpected( code=200, - res=[ - { - "sessionId": 70565, - "expSessionPk": 78889, - "beamLineSetupId": 1761425, - "proposalId": 9096, - "projectCode": None, - "BLSession_startDate": "2016-11-22T09:30:00", - "BLSession_endDate": "2016-11-23T17:00:00", - "beamLineName": "BL01", - "scheduled": 1, - "nbShifts": 1, - "comments": None, - "beamLineOperator": "DARWIN C", - "visit_number": None, - "bltimeStamp": "2022-05-10T07:59:32", - "usedFlag": None, - "sessionTitle": None, - "structureDeterminations": None, - "dewarTransport": None, - "databackupFrance": None, - "databackupEurope": None, - "operatorSiteNumber": "91481", - "BLSession_lastUpdate": "2016-11-23T17:00:00", - "BLSession_protectedData": None, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_ProposalNumber": "1", - "Proposal_ProposalType": "MX", - "Person_personId": 404290, - "Person_familyName": "PASTEUR", - "Person_givenName": "Louis", - "Person_emailAddress": "test@test.test", - "energyScanCount": 0, - "sampleCount": 0, - "imagesCount": None, - "testDataCollectionGroupCount": 0, - "dataCollectionGroupCount": 0, - "EMdataCollectionGroupCount": 0, - "xrfSpectrumCount": 0, - "hplcCount": 0, - "sampleChangerCount": 0, - "calibrationCount": 0, - "lastExperimentDataCollectionGroup": None, - "lastEndTimeDataCollectionGroup": None, - }, - { - "sessionId": 70566, - "expSessionPk": 78888, - "beamLineSetupId": 1761426, - "proposalId": 9096, - "projectCode": None, - "BLSession_startDate": 
"2016-11-23T09:30:00", - "BLSession_endDate": "2016-11-23T17:00:00", - "beamLineName": "BL01", - "scheduled": 1, - "nbShifts": 1, - "comments": None, - "beamLineOperator": "PASTEUR L", - "visit_number": None, - "bltimeStamp": "2022-05-10T07:59:32", - "usedFlag": None, - "sessionTitle": None, - "structureDeterminations": None, - "dewarTransport": None, - "databackupFrance": None, - "databackupEurope": None, - "operatorSiteNumber": "17074", - "BLSession_lastUpdate": "2016-11-23T17:00:00", - "BLSession_protectedData": None, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_ProposalNumber": "1", - "Proposal_ProposalType": "MX", - "Person_personId": 404290, - "Person_familyName": "PASTEUR", - "Person_givenName": "Louis", - "Person_emailAddress": "test@test.test", - "energyScanCount": 0, - "sampleCount": 0, - "imagesCount": None, - "testDataCollectionGroupCount": 0, - "dataCollectionGroupCount": 0, - "EMdataCollectionGroupCount": 0, - "xrfSpectrumCount": 0, - "hplcCount": 0, - "sampleChangerCount": 0, - "calibrationCount": 0, - "lastExperimentDataCollectionGroup": None, - "lastEndTimeDataCollectionGroup": None, - }, - { - "sessionId": 70567, - "expSessionPk": 56630, - "beamLineSetupId": 1761427, - "proposalId": 9096, - "projectCode": None, - "BLSession_startDate": "2016-07-22T09:30:00", - "BLSession_endDate": "2016-07-23T08:00:00", - "beamLineName": "BL02", - "scheduled": 1, - "nbShifts": 3, - "comments": None, - "beamLineOperator": "DARWIN C", - "visit_number": None, - "bltimeStamp": "2022-05-10T07:59:32", - "usedFlag": None, - "sessionTitle": None, - "structureDeterminations": None, - "dewarTransport": None, - "databackupFrance": None, - "databackupEurope": None, - "operatorSiteNumber": "91481", - "BLSession_lastUpdate": "2016-07-23T08:00:00", - "BLSession_protectedData": None, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_ProposalNumber": "1", - "Proposal_ProposalType": "MX", - "Person_personId": 404290, - 
"Person_familyName": "PASTEUR", - "Person_givenName": "Louis", - "Person_emailAddress": "test@test.test", - "energyScanCount": 0, - "sampleCount": 0, - "imagesCount": None, - "testDataCollectionGroupCount": 0, - "dataCollectionGroupCount": 0, - "EMdataCollectionGroupCount": 0, - "xrfSpectrumCount": 0, - "hplcCount": 0, - "sampleChangerCount": 0, - "calibrationCount": 0, - "lastExperimentDataCollectionGroup": None, - "lastEndTimeDataCollectionGroup": None, - }, - { - "sessionId": 70568, - "expSessionPk": 79910, - "beamLineSetupId": 1761428, - "proposalId": 9096, - "projectCode": None, - "BLSession_startDate": "2017-05-12T09:30:00", - "BLSession_endDate": "2017-05-13T08:00:00", - "beamLineName": "BL01", - "scheduled": 1, - "nbShifts": 3, - "comments": None, - "beamLineOperator": "PASTEUR L", - "visit_number": None, - "bltimeStamp": "2022-05-10T07:59:32", - "usedFlag": None, - "sessionTitle": None, - "structureDeterminations": None, - "dewarTransport": None, - "databackupFrance": None, - "databackupEurope": None, - "operatorSiteNumber": "17074", - "BLSession_lastUpdate": "2017-05-13T08:00:00", - "BLSession_protectedData": None, - "Proposal_title": "TEST", - "Proposal_proposalCode": "MX", - "Proposal_ProposalNumber": "1", - "Proposal_ProposalType": "MX", - "Person_personId": 404290, - "Person_familyName": "PASTEUR", - "Person_givenName": "Louis", - "Person_emailAddress": "test@test.test", - "energyScanCount": 0, - "sampleCount": 0, - "imagesCount": None, - "testDataCollectionGroupCount": 0, - "dataCollectionGroupCount": 0, - "EMdataCollectionGroupCount": 0, - "xrfSpectrumCount": 0, - "hplcCount": 0, - "sampleChangerCount": 0, - "calibrationCount": 0, - "lastExperimentDataCollectionGroup": None, - "lastEndTimeDataCollectionGroup": None, - }, - ], ), ), ApiTestElem( - name="list own_sessions empty", + name="efgh / blc00001-1 / 404", input=ApiTestInput( - permissions=[ - "own_sessions", - ], login="efgh", - route="/sessions/proposal/MX1", - ), - 
expected=ApiTestExpected(code=200, res=[]), - ), - ApiTestElem( - name="no rights", - input=ApiTestInput( - permissions=[], - login="pasteur", - route="/sessions/proposal/MX1", - ), - expected=ApiTestExpected( - code=401, - res={ - "detail": "User pasteur (permissions assigned: []) has no appropriate permission (any: ['own_sessions', 'all_sessions']) to execute method." - }, - ), - ), - ApiTestElem( - name="list proposal does not exist", - input=ApiTestInput( permissions=[ - "own_sessions", + "bl_admin", ], - login="pasteur", - route="/sessions/proposal/UNKN", + route="/sessions/1", ), - expected=ApiTestExpected(code=200, res=[]), - ), - ApiTestElem( - name="list all_sessions", - input=ApiTestInput( - permissions=[ - "all_sessions", - ], - login="pasteur", - route="/sessions/proposal/MX1", + expected=ApiTestExpected( + code=404, ), - expected=ApiTestExpected(code=200), ), ] diff --git a/tests/core/api/test_authorization.py b/tests/core/api/legacy/test_authorization.py similarity index 92% rename from tests/core/api/test_authorization.py rename to tests/core/api/legacy/test_authorization.py index ad321482..4943f6b3 100644 --- a/tests/core/api/test_authorization.py +++ b/tests/core/api/legacy/test_authorization.py @@ -5,7 +5,7 @@ from tests.conftest import AuthClient from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem -from tests.core.api.data.authorization import ( +from tests.core.api.data.legacy.authorization import ( test_data_session, test_data_proposal, ) diff --git a/tests/core/api/legacy/test_proposals_legacy.py b/tests/core/api/legacy/test_proposals_legacy.py new file mode 100644 index 00000000..266f1c07 --- /dev/null +++ b/tests/core/api/legacy/test_proposals_legacy.py @@ -0,0 +1,21 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.legacy.proposals import ( + 
test_data_proposal_list, + test_data_proposal_info, +) + + +@pytest.mark.parametrize("test_elem", test_data_proposal_list, ids=get_elem_name) +def test_proposal_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize("test_elem", test_data_proposal_info, ids=get_elem_name) +def test_proposal_info(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) diff --git a/tests/core/api/legacy/test_sessions_legacy.py b/tests/core/api/legacy/test_sessions_legacy.py new file mode 100644 index 00000000..26ba4838 --- /dev/null +++ b/tests/core/api/legacy/test_sessions_legacy.py @@ -0,0 +1,33 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.legacy.sessions import ( + test_data_session_proposal_list, + test_data_session_list, + test_data_session_dates_list, +) + + +@pytest.mark.parametrize("test_elem", test_data_session_list, ids=get_elem_name) +def test_session_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize("test_elem", test_data_session_dates_list, ids=get_elem_name) +def test_session_dates_list( + auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp +): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize( + "test_elem", test_data_session_proposal_list, ids=get_elem_name +) +def test_session_proposal_list( + auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp +): + run_test(auth_client, test_elem, app) diff --git a/tests/core/api/test_beamline_groups.py b/tests/core/api/test_beamline_groups.py new file mode 100644 index 00000000..049e3537 --- /dev/null +++ b/tests/core/api/test_beamline_groups.py @@ -0,0 +1,43 @@ +from starlette.types import ASGIApp + +from 
tests.core.api.utils.permissions import mock_permissions +from tests.authclient import AuthClient + + +def test_all_proposals(auth_client_efgh: AuthClient, app: ASGIApp): + """Browse all proposals""" + with mock_permissions("all_proposals", app): + resp = auth_client_efgh.get("/proposals") + assert resp.status_code == 200 + json = resp.json() + + assert len(json["results"]) == 2 + + +def test_bl_admin(auth_client_efgh: AuthClient, app: ASGIApp, with_beamline_groups): + """Should be able to browse proposals on beamline BL01 and BL02""" + with mock_permissions("bl_admin", app): + resp = auth_client_efgh.get("/proposals") + assert resp.status_code == 200 + json = resp.json() + + assert len(json["results"]) == 1 + assert set(json["results"][0]["_metadata"]["beamLines"]) == set( + ["BL01", "BL02"] + ) + + +def test_no_permission(auth_client_abcd: AuthClient): + """Browse only proposals with SessionHasPerson links""" + resp = auth_client_abcd.get("/proposals") + assert resp.status_code == 200 + json = resp.json() + + assert len(json["results"]) == 1 + assert json["results"][0]["_metadata"]["beamLines"] == ["bl"] + + +def test_sessions_for_group(auth_client_abcd: AuthClient, with_beamline_groups): + """Browse sessions for beamline group""" + resp = auth_client_abcd.get("/sessions/group?beamLineGroup=BL0x") + assert resp.status_code == 200 diff --git a/tests/core/api/test_datacollections.py b/tests/core/api/test_datacollections.py new file mode 100644 index 00000000..4cdd05d4 --- /dev/null +++ b/tests/core/api/test_datacollections.py @@ -0,0 +1,27 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.datacollections import ( + test_data_dc_attachments, + test_dc_images, + test_workflows, +) + + +@pytest.mark.parametrize("test_elem", test_data_dc_attachments, ids=get_elem_name) +def test_dc_attachments(auth_client: 
AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize("test_elem", test_dc_images, ids=get_elem_name) +def test_dc_images(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize("test_elem", test_workflows, ids=get_elem_name) +def test_workflows(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) diff --git a/tests/core/api/test_labcontacts.py b/tests/core/api/test_labcontacts.py new file mode 100644 index 00000000..bd544b6d --- /dev/null +++ b/tests/core/api/test_labcontacts.py @@ -0,0 +1,63 @@ +import time + +from starlette.types import ASGIApp + +from tests.authclient import AuthClient + +LABCONTACT = { + "proposalId": 1, + "cardName": f"test card {time.time()}", + "Person": { + "givenName": "test person", + "familyName": "last name", + "Laboratory": { + "name": "lab name", + "address": "address", + "city": "city", + "country": "country", + }, + }, +} + +UPDATED_CONTACT = {"cardName": f"updated card {time.time()}"} + + +def test_labcontacts(auth_client_abcd: AuthClient, app: ASGIApp): + """Browse lab contacts""" + resp = auth_client_abcd.get("/labcontacts") + assert resp.status_code == 200 + + +def test_create_labcontact(auth_client_abcd: AuthClient, app: ASGIApp): + """Create a lab contact""" + + resp = auth_client_abcd.post("/labcontacts", payload=LABCONTACT) + assert resp.status_code == 201 + json = resp.json() + + resp = auth_client_abcd.get(f"/labcontacts/{json['labContactId']}") + assert resp.status_code == 200 + + +def test_create_labcontact_invalid_proposal(auth_client_efgh: AuthClient, app: ASGIApp): + """Create a lab contact without valid proposal""" + + resp = auth_client_efgh.post("/labcontacts", payload=LABCONTACT) + assert resp.status_code == 404 + + +def test_update_labcontact(auth_client_abcd: AuthClient, app: ASGIApp): + """Browse lab contacts""" + 
resp = auth_client_abcd.get("/labcontacts") + assert resp.status_code == 200 + + json = resp.json() + results = json["results"] + latest = results[-1] + + resp = auth_client_abcd.patch( + f"/labcontacts/{latest['labContactId']}", payload=UPDATED_CONTACT + ) + assert resp.status_code == 200 + updated_json = resp.json() + assert updated_json["cardName"] == UPDATED_CONTACT["cardName"] diff --git a/tests/core/api/test_proposals.py b/tests/core/api/test_proposals.py index 5ab4a394..baab7613 100644 --- a/tests/core/api/test_proposals.py +++ b/tests/core/api/test_proposals.py @@ -5,17 +5,9 @@ from tests.conftest import AuthClient from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem -from tests.core.api.data.proposals import ( - test_data_proposal_list, - test_data_proposal_info, -) +from tests.core.api.data.proposals import test_data_proposal_list @pytest.mark.parametrize("test_elem", test_data_proposal_list, ids=get_elem_name) def test_proposal_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): run_test(auth_client, test_elem, app) - - -@pytest.mark.parametrize("test_elem", test_data_proposal_info, ids=get_elem_name) -def test_proposal_info(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): - run_test(auth_client, test_elem, app) diff --git a/tests/core/api/test_proteins.py b/tests/core/api/test_proteins.py new file mode 100644 index 00000000..4478678d --- /dev/null +++ b/tests/core/api/test_proteins.py @@ -0,0 +1,15 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.proteins import ( + test_data_proteins_list, +) + + +@pytest.mark.parametrize("test_elem", test_data_proteins_list, ids=get_elem_name) +def test_proteins_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) diff --git 
a/tests/core/api/test_samples.py b/tests/core/api/test_samples.py new file mode 100644 index 00000000..6bf4099d --- /dev/null +++ b/tests/core/api/test_samples.py @@ -0,0 +1,27 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.samples import ( + test_data_samples_list, + test_data_sampleimages_list, + test_data_subsamples_list, +) + + +@pytest.mark.parametrize("test_elem", test_data_samples_list, ids=get_elem_name) +def test_samples_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize("test_elem", test_data_sampleimages_list, ids=get_elem_name) +def test_sample_images(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize("test_elem", test_data_subsamples_list, ids=get_elem_name) +def test_subsamples_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) diff --git a/tests/core/api/test_sessions.py b/tests/core/api/test_sessions.py index dcc8f73a..6b301daa 100644 --- a/tests/core/api/test_sessions.py +++ b/tests/core/api/test_sessions.py @@ -5,29 +5,9 @@ from tests.conftest import AuthClient from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem -from tests.core.api.data.sessions import ( - test_data_session_proposal_list, - test_data_session_list, - test_data_session_dates_list, -) +from tests.core.api.data.sessions import test_data_sessions_list -@pytest.mark.parametrize("test_elem", test_data_session_list, ids=get_elem_name) +@pytest.mark.parametrize("test_elem", test_data_sessions_list, ids=get_elem_name) def test_session_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): run_test(auth_client, test_elem, app) - - -@pytest.mark.parametrize("test_elem", 
test_data_session_dates_list, ids=get_elem_name) -def test_session_dates_list( - auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp -): - run_test(auth_client, test_elem, app) - - -@pytest.mark.parametrize( - "test_elem", test_data_session_proposal_list, ids=get_elem_name -) -def test_session_proposal_list( - auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp -): - run_test(auth_client, test_elem, app) diff --git a/tests/core/api/test_user.py b/tests/core/api/test_user.py new file mode 100644 index 00000000..633792fc --- /dev/null +++ b/tests/core/api/test_user.py @@ -0,0 +1,9 @@ +from starlette.types import ASGIApp + +from tests.authclient import AuthClient + + +def test_user(auth_client_efgh: AuthClient, app: ASGIApp): + """Get current user""" + resp = auth_client_efgh.get("/user/current") + assert resp.status_code == 200 diff --git a/tests/core/api/test_userportalsync_create.py b/tests/core/api/test_userportalsync_create.py index 9142005a..f7a8edf0 100644 --- a/tests/core/api/test_userportalsync_create.py +++ b/tests/core/api/test_userportalsync_create.py @@ -24,6 +24,7 @@ def test_call_sync_proposal_create( def test_proposal_persons_sync(with_db_session): # Only one proposal with proposalCode and proposalNumber should have been created in DB proposals = get_proposals( + withAuthorization=False, skip=0, limit=10, proposalCode=test_data_proposal_userportalsync_create["proposal"][ @@ -32,7 +33,6 @@ def test_proposal_persons_sync(with_db_session): proposalNumber=test_data_proposal_userportalsync_create["proposal"][ "proposalNumber" ], - proposalHasPerson=True, ) assert proposals.total == 1 @@ -66,8 +66,9 @@ def test_proposal_persons_sync(with_db_session): assert first_person_id == proposals.results[0].personId # Check the number of persons within the ProposalHasPerson table - assert len(test_data_proposal_userportalsync_create["proposal"]["persons"]) == len( - proposals.results[0].ProposalHasPerson + assert ( + 
len(test_data_proposal_userportalsync_create["proposal"]["persons"]) + == proposals.results[0]._metadata["persons"] ) @@ -114,10 +115,10 @@ def test_session_persons_sync(with_db_session): try: if json_session["externalId"] is not None: sessions = get_sessions( + withAuthorization=False, skip=0, limit=10, externalId=json_session["externalId"], - sessionHasPerson=True, ) except KeyError: pass @@ -127,6 +128,7 @@ def test_session_persons_sync(with_db_session): # It might be deprecated later if json_session["expSessionPk"] is not None: sessions = get_sessions( + withAuthorization=False, skip=0, limit=10, expSessionPk=json_session["expSessionPk"], @@ -138,8 +140,8 @@ def test_session_persons_sync(with_db_session): sessions_in_db += 1 # Check the number of persons within the Session_has_Person table - assert len(json_session["persons"]) == len( - sessions.results[0].SessionHasPerson + assert ( + len(json_session["persons"]) == sessions.results[0]._metadata["persons"] ) # Check the amount of sessions corresponds with the entries in the DB @@ -149,6 +151,7 @@ def test_session_persons_sync(with_db_session): def test_lab_contacts_sync(with_db_session): # Get the proposal from the DB proposals = get_proposals( + withAuthorization=False, skip=0, limit=10, proposalCode=test_data_proposal_userportalsync_create["proposal"][ @@ -160,6 +163,7 @@ def test_lab_contacts_sync(with_db_session): ) # Get the lab contacts for the proposal in DB labcontacts = get_labcontacts( + withAuthorization=False, skip=0, limit=10, proposalId=proposals.results[0].proposalId, @@ -177,6 +181,7 @@ def test_lab_contacts_sync(with_db_session): def test_proteins_sync(with_db_session): # Get the proposal from the DB proposals = get_proposals( + withAuthorization=False, skip=0, limit=10, proposalCode=test_data_proposal_userportalsync_create["proposal"][ @@ -193,6 +198,7 @@ def test_proteins_sync(with_db_session): try: if protein["externalId"] is not None: proteins = get_proteins( + withAuthorization=False, 
skip=0, limit=10, externalId=protein["externalId"], @@ -203,6 +209,7 @@ def test_proteins_sync(with_db_session): try: proteins = get_proteins( + withAuthorization=False, skip=0, limit=10, acronym=protein["acronym"], diff --git a/tests/core/api/test_userportalsync_update.py b/tests/core/api/test_userportalsync_update.py index 105f02c6..309de735 100644 --- a/tests/core/api/test_userportalsync_update.py +++ b/tests/core/api/test_userportalsync_update.py @@ -21,6 +21,7 @@ def test_call_sync_proposal_create( def test_proposal_title_update(with_db_session): # Get the proposal from the DB proposals = get_proposals( + withAuthorization=False, skip=0, limit=10, proposalCode=test_data_proposal_userportalsync_update["proposal"][ @@ -77,6 +78,7 @@ def test_person_laboratory_name_update(with_db_session): def test_session_beamline_name_update(with_db_session): # Get the session from the DB sessions = get_sessions( + withAuthorization=False, skip=0, limit=10, externalId=test_data_proposal_userportalsync_update["sessions"][0][ diff --git a/tests/core/api/utils/apitest.py b/tests/core/api/utils/apitest.py index a707218f..49c049c5 100644 --- a/tests/core/api/utils/apitest.py +++ b/tests/core/api/utils/apitest.py @@ -11,9 +11,10 @@ class ApiTestInput: def __init__( self, + *, login: str, - permissions: list[str], route: str, + permissions: list[str] = [], method: str = "get", payload: str | None = None, ) -> None: