Skip to content

Commit

Permalink
Merge pull request #104 from ai-cfia/94-Manage-picture-set-in-directo…
Browse files Browse the repository at this point in the history
…ries-endpoints

94 manage picture set in directories endpoints
  • Loading branch information
sylvanie85 authored Jul 16, 2024
2 parents ea27142 + bbf64cf commit b5dabe5
Show file tree
Hide file tree
Showing 5 changed files with 318 additions and 21 deletions.
13 changes: 11 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,17 @@ backend->>+AzureStorageAPI: (async) upload_inference_result(json)

### RUNNING NACHET-BACKEND FROM DEVCONTAINER

When you are developing, you can run the program while in the devcontainer by
using this command:
When developing, you first need to install the required packages.

This command must be run the **first time** you want to run the backend on your
computer, but also **every time** you update the requirements.txt file and
**every time** the datastore repo is updated

```bash
pip install -r requirements.txt
```

Then, you can run the backend while in the devcontainer by using this command:

```bash
hypercorn -b :8080 app:app
Expand Down
123 changes: 110 additions & 13 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ async def before_serving():
raise ServerError("Incorrect environment variable: PIPELINE_VERSION")

# Store the seeds names and ml structure in CACHE
CACHE["seeds"] = datastore.get_all_seeds()
CACHE["seeds"] = await datastore.get_all_seeds()
CACHE["endpoints"] = await get_pipelines()

print(
Expand Down Expand Up @@ -197,7 +197,7 @@ async def get_user_id() :
print(error)
return jsonify([f"GetUserIdError: {str(error)}"]), 400


# Deprecated
@app.post("/del")
async def delete_directory():
"""
Expand Down Expand Up @@ -232,25 +232,117 @@ async def delete_directory():
print(error)
return jsonify([f"DeleteDirectoryRequestError: {str(error)}"]), 400

@app.post("/delete-request")
async def delete_request():
    """
    Request to delete a directory in the user's container.

    Expects a JSON body with:
        container_name: the user's container name (also used as the user id)
        folder_uuid: the picture_set id of the directory to delete

    Returns True if there are validated pictures in the directory, False
    otherwise, so the frontend can ask the user for confirmation before a
    destructive delete.

    Responds 400 with a DeleteDirectoryRequestError message on any failure.
    """
    try:
        data = await request.get_json()
        # NOTE(review): the container name doubles as the user id for
        # datastore calls — confirm this convention against the datastore API.
        user_id = data["container_name"]
        picture_set_id = data["folder_uuid"]
        if user_id and picture_set_id:
            # Open db connection
            connection = datastore.get_connection()
            cursor = datastore.get_cursor(connection)

            response = await datastore.delete_directory_request(cursor, str(user_id), str(picture_set_id))
            # Close connection
            datastore.end_query(connection, cursor)

            return jsonify(response), 200
        else:
            raise DeleteDirectoryRequestError("missing container or directory name")

    except (KeyError, TypeError, azure_storage.MountContainerError, ResourceNotFoundError, DeleteDirectoryRequestError, ServiceResponseError, datastore.DatastoreError) as error:
        print(error)
        return jsonify([f"DeleteDirectoryRequestError: {str(error)}"]), 400

@app.post("/delete-permanently")
async def delete_permanently():
    """
    Permanently deletes a directory in the user's container.

    Expects a JSON body with:
        container_name: the user's container name (also used as the user id)
        folder_uuid: the picture_set id of the directory to delete

    Deletes the picture_set and everything related to it in the database,
    and all the corresponding blobs in Azure Blob Storage.

    Responds 400 with a DeleteDirectoryRequestError message on any failure.
    """
    try:
        data = await request.get_json()
        container_name = data["container_name"]
        # The container name doubles as the user id for datastore calls.
        user_id = container_name
        picture_set_id = data["folder_uuid"]
        if user_id and picture_set_id:
            container_client = await azure_storage.mount_container(
                CONNECTION_STRING, container_name, create_container=True
            )
            # Open db connection
            connection = datastore.get_connection()
            cursor = datastore.get_cursor(connection)

            response = await datastore.delete_directory_permanently(cursor, str(user_id), str(picture_set_id), container_client)
            # Close connection
            datastore.end_query(connection, cursor)

            return jsonify(response), 200
        else:
            raise DeleteDirectoryRequestError("missing container or directory name")

    except (KeyError, TypeError, azure_storage.MountContainerError, ResourceNotFoundError, DeleteDirectoryRequestError, ServiceResponseError, datastore.DatastoreError) as error:
        print(error)
        return jsonify([f"DeleteDirectoryRequestError: {str(error)}"]), 400

@app.post("/delete-with-archive")
async def delete_with_archive():
    """
    Deletes a directory in the user's container while archiving the
    validated pictures in the dev user container.

    Expects a JSON body with:
        container_name: the user's container name (also used as the user id)
        folder_uuid: the picture_set id of the directory to delete

    Returns True on success. Responds 400 with a DeleteDirectoryRequestError
    message on any failure.
    """
    try:
        data = await request.get_json()
        container_name = data["container_name"]
        # The container name doubles as the user id for datastore calls.
        user_id = container_name
        picture_set_id = data["folder_uuid"]
        if user_id and picture_set_id:
            container_client = await azure_storage.mount_container(
                CONNECTION_STRING, container_name, create_container=True
            )
            # Open db connection
            connection = datastore.get_connection()
            cursor = datastore.get_cursor(connection)

            response = await datastore.delete_directory_with_archive(cursor, str(user_id), str(picture_set_id), container_client)
            # Close connection
            datastore.end_query(connection, cursor)

            if response:
                return jsonify(True), 200
            else:
                # Surface an explicit failure instead of the misleading
                # "missing container" message (and instead of silently
                # returning None when the archive/delete did not complete).
                raise DeleteDirectoryRequestError("error while deleting the directory")
        else:
            raise DeleteDirectoryRequestError("missing container or directory name")

    except (KeyError, TypeError, azure_storage.MountContainerError, ResourceNotFoundError, DeleteDirectoryRequestError, ServiceResponseError, datastore.DatastoreError) as error:
        print(error)
        return jsonify([f"DeleteDirectoryRequestError: {str(error)}"]), 400


@app.post("/dir")
async def list_directories():
    """
    Lists all directories in the user's container.

    Expects a JSON body with:
        container_name: the user's container name (also used as the user id)

    Returns the user's directories from the database as JSON
    (presumably id, name and picture count — see datastore.get_directories).

    Responds 400 with a ListDirectoriesRequestError message on any failure.
    """
    try:
        data = await request.get_json()
        user_id = data["container_name"]
        if user_id:
            # Open db connection
            connection = datastore.get_connection()
            cursor = datastore.get_cursor(connection)

            directories = await datastore.get_directories(cursor, str(user_id))
            # Close connection
            datastore.end_query(connection, cursor)
            return jsonify(directories)
        else:
            raise ListDirectoriesRequestError("Missing container name")

    except (KeyError, TypeError, ListDirectoriesRequestError, azure_storage.MountContainerError, datastore.DatastoreError) as error:
        print(error)
        return jsonify([f"ListDirectoriesRequestError: {str(error)}"]), 400

Expand All @@ -263,22 +355,27 @@ async def create_directory():
try:
data = await request.get_json()
container_name = data["container_name"]
user_id = container_name
folder_name = data["folder_name"]
if container_name and folder_name:
container_client = await azure_storage.mount_container(
CONNECTION_STRING, container_name, create_container=True
)
response = await azure_storage.create_folder(
container_client, folder_name
)
# Open db connection
connection = datastore.get_connection()
cursor = datastore.get_cursor(connection)

response = await datastore.create_picture_set(cursor, container_client, user_id, 0, folder_name)
# Close connection
datastore.end_query(connection, cursor)
if response:
return jsonify([True]), 200
return jsonify([response]), 200
else:
raise CreateDirectoryRequestError("directory already exists")
else:
raise CreateDirectoryRequestError("missing container or directory name")

except (KeyError, TypeError, CreateDirectoryRequestError, azure_storage.MountContainerError) as error:
except (KeyError, TypeError, CreateDirectoryRequestError, azure_storage.MountContainerError, datastore.DatastoreError) as error:
print(error)
return jsonify([f"CreateDirectoryRequestError: {str(error)}"]), 400

Expand Down
2 changes: 1 addition & 1 deletion docs/nachet-feedback-documentation.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Import Folder Images
# User inference feedback

## Executive summary

Expand Down
167 changes: 167 additions & 0 deletions docs/nachet-manage-folders.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,167 @@
# Manage folders

## Executive summary

A user is able to have a preview of his blob storage container in the Nachet
application. He can have many folders in his container and pictures in it. Since
we have the database, those folders are related to the picture_set table and
each picture is also saved in the database. Here is the schema of the actual
database.

``` mermaid
---
title: Extract from Nachet DB Structure
---
erDiagram
picture_set{
uuid id PK
json picture_set
uuid owner_id FK
timestamp upload_date
}
picture{
uuid id PK
json picture
uuid picture_set_id FK
uuid parent FK
int nb_object
boolean verified
timestamp upload_date
}
inference{
uuid id PK
json inference
uuid picture_id FK
uuid user_id FK
timestamp upload_date
}
picture_seed{
uuid id PK
uuid picture_id FK
uuid seed_id FK
timestamp upload_date
}
picture_set ||--o{picture: contains
picture ||--o{picture: cropped
picture |o--o{picture_seed: has
picture_seed }o--o| seed: has
inference ||--o{ object: detects
object ||--o{ seed_object: is
seed_object }o--|| seed: is
inference ||--|| picture: infers
```

From the nachet application, a user can create and delete folders, so the blob
storage and the database must be correctly updated.

When a folder is created, it takes on a name and is created as a picture_set in
the database and as a folder in the blob storage container of the user.

There are more issues when the user wants to delete a folder. If the folder
contains validated pictures, it may be useful for training purposes, because it
means there is a valid inference associated with each seed on the picture. The
same applies to pictures imported in batches, which have been downloaded for
training purposes. Our solution is to request confirmation from the user, who
can decide to delete pictures from his container but let us save them, or he can
delete everything anyway, for example if there has been a missed click.

## Prerequisites

- The user must be signed in and have an Azure Storage Container
- The backend needs to have a connection with the datastore

## Sequence Diagram

### Delete use case

```mermaid
sequenceDiagram
participant User
participant FE
participant BE
participant DS
User->>FE: Delete Folder
rect rgb(200, 50, 50)
FE->>BE: /delete-request
end
rect rgb(200, 50, 50)
BE->>DS: Check if there are validated inferences or pictures from a batch import
note left of DS: Check for picture_seed entities linked to picture id<br> since a verified inference and a batch upload has those
end
alt picture_seed exist
DS-->>BE: Validated inference status or pictures from batch import
BE->>FE: True : Request user confirmation
rect rgb(200, 50, 50)
FE->>User: Ask to keep data for training
note left of FE : "Some of those pictures were validated or upload via the batch import.<br>Do you want us to keep them for training ? <br>If yes, your folder will be deleted but we'll keep the validated pictures.<br>If no, everything will be deleted and unrecoverable.
end
alt No
User ->>FE: delete all
rect rgb(200, 50, 50)
FE-->BE: /delete-permanently
end
BE->>DS: delete picture_set
else YES
User ->>FE: Keep them
rect rgb(200, 50, 50)
FE-->BE: /delete-with-archive
end
rect rgb(200, 50, 50)
BE->>DS: archive data for validated inferences and batch import in picture_set
note left of DS: "Pictures are moved in different container <br> DB entities updated"
BE->>DS: delete picture_set
note left of DS: "Folder and all the files left are deleted, <br>related pictures, inference are deleted."
end
else CANCEL
User ->>FE: cancel : nothing happens
end
else no picture_seed exist
DS-->>BE: No pictures with validated inference status or from batch import
BE-->>FE: False: confirmation to delete the folder
rect rgb(200, 50, 50)
FE->>User: Ask confirmation
note left of FE : "Are you sure ? Everything in this folder will be deleted and unrecoverable"
end
alt Yes
User ->>FE: delete all
rect rgb(200, 50, 50)
FE-->BE: /delete-permanently
end
BE->>DS: delete picture_set
else CANCEL
User ->>FE: cancel : nothing happens
end
end
```

## API Routes

### /create-dir

The `create-dir` route need a folder_name and create the folder in database and
in Azure Blob storage.

### /dir

The `dir` route retrieves all user directories from the database (id, name and
nb_pictures).

### /delete-request

The `delete-request` route returns True if there are validated pictures in the
given folder, and False otherwise.

### /delete-permanently

The `delete-permanently` route deletes the given folder: it deletes the
picture_set and everything related to it in the database, and it deletes all
the corresponding blobs in the Azure blob storage.

### /delete-with-archive

The `delete-with-archive` route deletes the given folder from the user
container but moves everything in it to the dev container.
Loading

0 comments on commit b5dabe5

Please sign in to comment.