Skip to content

Commit

Permalink
Upgraded support and migrating to primary python version 3.11; fasta…
Browse files Browse the repository at this point in the history
…pi to 0.103.1 which included major version update for pydantic
  • Loading branch information
SeriousHorncat committed Oct 2, 2023
1 parent 2855754 commit 6d85fb4
Show file tree
Hide file tree
Showing 13 changed files with 81 additions and 108 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/python.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.8"]
python-version: ["3.11"]

defaults:
run:
Expand Down
4 changes: 2 additions & 2 deletions backend/.pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -310,8 +310,8 @@ min-public-methods=2
[EXCEPTIONS]

# Exceptions that will emit a warning when caught.
overgeneral-exceptions=BaseException,
Exception
overgeneral-exceptions=builtins.BaseException,
builtins.Exception


[FORMAT]
Expand Down
4 changes: 2 additions & 2 deletions backend/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
# Local Development Stage
FROM python:3.9.10 as development-stage
FROM python:3.11-slim-bookworm as development-stage
WORKDIR /app
COPY requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
COPY ./src /app/src
ENTRYPOINT ["/bin/sh", "-c", "uvicorn src.main:app --host 0.0.0.0 --port 8000 --log-level info --reload"]

# Production Build Stage
FROM python:3.9.10 as production-stage
FROM python:3.11-slim-bookworm as production-stage
WORKDIR /app
COPY logging.conf /app/logging.conf
COPY requirements.txt /app/requirements.txt
Expand Down
18 changes: 9 additions & 9 deletions backend/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,18 +1,18 @@
fastapi==0.74.1
uvicorn[standard]==0.22.0
fastapi[all]==0.103.1
uvicorn[standard]==0.23.2
python-cas==1.6.0
itsdangerous==2.1.2
pymongo==4.3.3
jq==1.4.0
pymongo==4.5.0
jq==1.6.0

python-multipart==0.0.5
python-multipart==0.0.6
python-jose[cryptography]==3.3.0
passlib==1.7.4
bcrypt==4.0.1

# dev
pytest-cov==4.0.0
pytest==7.2.1
pylint==2.15.10
requests==2.28.2
yapf==0.32.0
pytest==7.4.2
pylint==3.0.0
requests==2.31.0
yapf==0.40.2
7 changes: 4 additions & 3 deletions backend/src/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
"""
# pylint: disable=too-few-public-methods
from functools import lru_cache
from pydantic import BaseSettings, root_validator
from pydantic import model_validator
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
Expand All @@ -18,15 +19,15 @@ class Settings(BaseSettings):
mongodb_host: str = "rosalution-db"
mongodb_db: str = "rosalution_db"
rosalution_key: str
auth_web_failure_redirect_route = "/login"
auth_web_failure_redirect_route: str = "/login"
oauth2_access_token_expire_minutes: int = 60 * 24 * 8 # 60 minutes * 24 hours * 8 days = 8 days
oauth2_algorithm: str = "HS256"
openapi_api_token_route: str = "auth/token"
cas_api_service_url: str = "http://dev.cgds.uab.edu/rosalution/api/auth/login?nexturl=%2F"
cas_server_url: str = "https://padlockdev.idm.uab.edu/cas/"
cas_login_enable: bool = False

@root_validator(pre=True)
@model_validator(mode="before")
@classmethod
def rosalution_key_exists(cls, values):
"""
Expand Down
50 changes: 30 additions & 20 deletions backend/src/models/analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from multiprocessing import Event
import re
from typing import List, Optional
from pydantic import BaseModel, root_validator
from pydantic import BaseModel, computed_field

from .event import Event

Expand All @@ -34,29 +34,39 @@ class BaseAnalysis(BaseModel):
"""The share parts of an analysis and it's summary"""

name: str
description: Optional[str]
description: Optional[str] = None
nominated_by: str
latest_status: Optional[StatusType]
created_date: Optional[date]
last_modified_date: Optional[date]
timeline: List[Event] = []
third_party_links: Optional[List] = []

# The structure of the root_validator from pydantic requires the method to be setup this way even if there is no
# self being used and no self argument
@root_validator
def compute_dates_and_status(cls, values): #pylint: disable=no-self-argument
"""Computes the dates and status of an analysis from a timeline"""
if len(values['timeline']) == 0:
return values

last_event = sorted(values['timeline'], key=lambda event: event.timestamp, reverse=True)[0]
values['last_modified_date'] = last_event.timestamp.date()
values['created_date'] = next(
(event.timestamp.date() for event in values['timeline'] if event.event == EventType.CREATE), None
)
values['latest_status'] = StatusType.from_event(last_event.event)
return values
@computed_field
@property
def created_date(self) -> date:
"""The created date derived from the create event in the timeline"""
if len(self.timeline) == 0:
return None

return next((event.timestamp.date() for event in self.timeline if event.event == EventType.CREATE), None)

@computed_field
@property
def last_modified_date(self) -> date:
"""The last modified date derived from the last event in the timeline"""
if len(self.timeline) == 0:
return None

last_event = sorted(self.timeline, key=lambda event: event.timestamp, reverse=True)[0]
return last_event.timestamp.date()

@computed_field
@property
def latest_status(self) -> StatusType:
"""The status as calculated from the events on the timeline"""
if len(self.timeline) == 0:
return None

last_event = sorted(self.timeline, key=lambda event: event.timestamp, reverse=True)[0]
return StatusType.from_event(last_event.event)


class AnalysisSummary(BaseAnalysis):
Expand Down
26 changes: 5 additions & 21 deletions backend/src/models/phenotips_json.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,12 @@
# pylint: disable=too-few-public-methods
from datetime import datetime
from typing import List, Optional
from pydantic import BaseModel, Extra
from pydantic import BaseModel, ConfigDict


class PhenotipsVariants(BaseModel):
"""Models a variant within a Phenotips json import"""
model_config = ConfigDict(extra='ignore')

gene: Optional[str] = None
inheritance: Optional[str] = None
Expand All @@ -20,49 +21,32 @@ class PhenotipsVariants(BaseModel):
cdna: Optional[str] = None
reference_genome: str

class config: # pylint: disable=invalid-name
"""Configures the pydantic model"""

extra = Extra.allow


class PhenotipsGene(BaseModel):
"""Models a gene within a Phenotips json genes"""
model_config = ConfigDict(extra='ignore')

comments: Optional[str] = None
gene: str
id: Optional[str] = None
strategy: Optional[list] = None
status: Optional[str] = None

class config: # pylint: disable=invalid-name
"""Configures the pydantic model"""

extra = Extra.allow


class PhenotipsHpoTerm(BaseModel):
"""Models a gene within a Phenotips json genes"""
model_config = ConfigDict(extra='ignore')

id: str
label: str = ""

class config: # pylint: disable=invalid-name
"""Configures the pydantic model"""

extra = Extra.allow


class BasePhenotips(BaseModel):
"""The share parts of a phenotips and it's summary"""
model_config = ConfigDict(extra='ignore')

date: datetime
external_id: str
variants: List[PhenotipsVariants] = []
genes: List[PhenotipsGene] = []
features: List[PhenotipsHpoTerm] = []

class config: # pylint: disable=invalid-name
"""Configures the pydantic model"""

extra = Extra.allow
2 changes: 1 addition & 1 deletion backend/src/repository/analysis_collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -364,7 +364,7 @@ def update_event(self, analysis_name: str, username: str, event_type: EventType)
analysis = self.collection.find_one({"name": analysis_name})
if not analysis:
raise ValueError(f"Analysis with name {analysis_name} does not exist.")
analysis['timeline'].append(Event.timestamp_event(username, event_type).dict())
analysis['timeline'].append(Event.timestamp_event(username, event_type).model_dump())

updated_document = self.collection.find_one_and_update(
{"name": analysis_name},
Expand Down
4 changes: 2 additions & 2 deletions backend/src/routers/analysis_router.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,8 +76,8 @@ async def create_file(

phenotips_importer = PhenotipsImporter(repositories["analysis"], repositories["genomic_unit"])
try:
new_analysis = phenotips_importer.import_phenotips_json(phenotips_input.dict())
new_analysis['timeline'].append(Event.timestamp_create_event(username).dict())
new_analysis = phenotips_importer.import_phenotips_json(phenotips_input.model_dump())
new_analysis['timeline'].append(Event.timestamp_create_event(username).model_dump())
repositories['analysis'].create_analysis(new_analysis)

except ValueError as exception:
Expand Down
2 changes: 1 addition & 1 deletion backend/src/security/oauth2.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ class OAuth2ClientCredentialsRequestForm:

def __init__(
self,
grant_type: str = Form(None, regex="client_credentials"),
grant_type: str = Form(None, pattern="client_credentials"),
scope: str = Form(""),
client_id: Optional[str] = Form(None),
client_secret: Optional[str] = Form(None),
Expand Down
55 changes: 15 additions & 40 deletions backend/tests/integration/test_analysis_routers.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
"""Analysis Routes Integration test"""

import json
import os
import datetime

from unittest.mock import patch
Expand All @@ -12,7 +11,7 @@

from src.core.annotation import AnnotationService

from ..test_utils import read_database_fixture, read_test_fixture
from ..test_utils import fixture_filepath, read_database_fixture, read_test_fixture


def test_get_analyses(client, mock_access_token, mock_repositories):
Expand Down Expand Up @@ -61,16 +60,9 @@ def test_import_analysis_with_phenotips_json(
)
mock_repositories['genomic_unit'].collection.find.return_value = read_database_fixture("genomic-units.json")

# This is used here because the 'read_fixture' returns a json dict rather than raw binary
# We actually want to send a binary file through the endpoint to simulate a file being sent
# then json.loads is used on the other end in the repository.
# This'll get updated and broken out in the test_utils in the future
path_to_current_file = os.path.realpath(__file__)
current_directory = os.path.split(path_to_current_file)[0]
path_to_file = os.path.join(current_directory, '../fixtures/' + 'phenotips-import.json')

with patch.object(BackgroundTasks, "add_task", return_value=None) as mock_background_add_task:
with open(path_to_file, "rb") as phenotips_file:
analysis_import_json_filepath = fixture_filepath('phenotips-import.json')
with open(analysis_import_json_filepath, "rb") as phenotips_file:
response = client.post(
"/analysis/import_file",
headers={"Authorization": "Bearer " + mock_access_token},
Expand Down Expand Up @@ -236,31 +228,21 @@ def test_attach_image_to_pedigree_section(client, mock_access_token, mock_reposi
mock_repositories['analysis'].collection.find_one_and_update.return_value = expected
mock_repositories['bucket'].bucket.put.return_value = "633afb87fb250a6ea1569555"

# This is used here because the 'read_fixture' returns a json dict rather than raw binary
# We actually want to send a binary file through the endpoint to simulate a file being sent
# then json.loads is used on the other end in the repository.
# This'll get updated and broken out in the test_utils in the future
path_to_current_file = os.path.realpath(__file__)
current_directory = os.path.split(path_to_current_file)[0]
path_to_file = os.path.join(current_directory, '../fixtures/' + 'pedigree-fake.jpg')

with open(path_to_file, "rb") as phenotips_file:
pedigree_image = phenotips_file.read()
pedigree_bytes = bytearray(pedigree_image)
section_image_filepath = fixture_filepath('pedigree-fake.jpg')
with open(section_image_filepath, "rb") as phenotips_file:
response = client.post(
"/analysis/CPAM0112/section/attach/image",
headers={"Authorization": "Bearer " + mock_access_token},
files={"upload_file": ("pedigree-fake.jpg", pedigree_bytes)},
files={"upload_file": ("pedigree-fake.jpg", phenotips_file)},
data=({"section_name": "Pedigree", "field_name": "Pedigree"})
)

phenotips_file.close()

assert response.status_code == 201
mock_repositories["analysis"].collection.find_one_and_update.assert_called_with({"name": "CPAM0112"},
{"$set": expected})

assert response.status_code == 201


def test_update_existing_pedigree_section_image(client, mock_access_token, mock_repositories):
""" Testing the update pedigree attachment endpoint """
Expand All @@ -269,24 +251,16 @@ def test_update_existing_pedigree_section_image(client, mock_access_token, mock_
mock_analysis = read_test_fixture("analysis-CPAM0002.json")
mock_repositories["analysis"].collection.find_one_and_update.return_value = mock_analysis

# This is used here because the 'read_fixture' returns a json dict rather than raw binary
# We actually want to send a binary file through the endpoint to simulate a file being sent
# then json.loads is used on the other end in the repository.
# This'll get updated and broken out in the test_utils in the future
path_to_current_file = os.path.realpath(__file__)
current_directory = os.path.split(path_to_current_file)[0]
path_to_file = os.path.join(current_directory, '../fixtures/' + 'pedigree-fake.jpg')

with open(path_to_file, 'rb') as file:
pedigree_image = file.read()
pedigree_bytes = bytearray(pedigree_image)
# Need to send the file as raw binary instead of the processed content
section_image_filepath = fixture_filepath('pedigree-fake.jpg')
with open(section_image_filepath, "rb") as image_file:
response = client.put(
"/analysis/CPAM0002/section/update/633afb87fb250a6ea1569555",
headers={"Authorization": "Bearer " + mock_access_token},
files={"upload_file": ("pedigree-fake.jpg", pedigree_bytes)},
files={"upload_file": ("pedigree-fake.jpg", image_file)},
data=({"section_name": "Pedigree", "field_name": "Pedigree"})
)
file.close()
image_file.close()

expected = {'section': 'Pedigree', 'field': 'Pedigree', 'image_id': '633afb87fb250a6ea1569555'}

Expand All @@ -299,10 +273,11 @@ def test_remove_existing_pedigree_section_image(client, mock_access_token, mock_
mock_repositories["analysis"].collection.find_one.return_value = read_test_fixture("analysis-CPAM0002.json")
mock_repositories["bucket"].bucket.delete.return_value = None

response = client.delete(
response = client.request(
'DELETE',
"/analysis/CPAM0002/section/remove/63505be22888347cf1c275db",
headers={"Authorization": "Bearer " + mock_access_token},
data=({"section_name": "Pedigree", "field_name": "Pedigree"})
data={"section_name": "Pedigree", "field_name": "Pedigree"},
)

mock_repositories["bucket"].bucket.delete.assert_called_with(ObjectId("63505be22888347cf1c275db"))
Expand Down
Loading

0 comments on commit 6d85fb4

Please sign in to comment.