Skip to content

Commit

Permalink
Merge pull request #149 from luk27official/ph
Browse files Browse the repository at this point in the history
Tasks visualization, docking viewer
  • Loading branch information
skodapetr authored Oct 7, 2024
2 parents f78224c + ccafade commit d0a9ab1
Show file tree
Hide file tree
Showing 31 changed files with 2,189 additions and 834 deletions.
66 changes: 47 additions & 19 deletions executor-docking/run_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,20 @@ class Status(enum.Enum):
FAILED = "failed"
SUCCESSFUL = "successful"

class Logger:
    """
    A simple logger that writes timestamped messages to a file.

    Can be used directly via log()/close(), or as a context manager,
    which guarantees the underlying file handle is closed.
    """
    def __init__(self, log_file: str):
        # Open eagerly so a bad path fails fast; utf-8 keeps the log
        # readable regardless of the platform's default encoding.
        self.log_file = open(log_file, "w", encoding="utf-8")

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def log(self, message: str):
        # Flush after every message so the log stays current even if the
        # process dies before close() is called.
        self.log_file.write(f"{datetime.datetime.now()} - {message}\n")
        self.log_file.flush()

    def close(self):
        self.log_file.close()

def _load_json(path: str):
"""
Method to load a json file from a given path.
def get_prediction_directory(docking_directory: str):
    """
    Method to get the path to the prediction directory from the docking directory.
    """
    # currently assuming that the docking and predictions paths are different just by the name
    return docking_directory.replace("docking", "predictions")

def get_prediction_path(docking_directory: str):
    """
    Method to get the path to the prediction file from the docking directory.
    """
    # currently assuming that the docking and predictions paths are different just by the name
    prediction_directory = get_prediction_directory(docking_directory)
    return os.path.join(prediction_directory, "public", "prediction.json")

def prepare_docking(input_file: str, structure_file_gzip: str, task_directory: str):
Expand Down Expand Up @@ -99,51 +113,61 @@ def execute_directory_task(docking_directory: str, taskId: int):
"""
Method to execute a task for a given directory and a given taskId.
"""
log_filename = os.path.join(docking_directory, str(taskId), "log")
logger = Logger(log_filename)

result_file = os.path.join(docking_directory, str(taskId), "public", "result.json")

#check if the directory exists - if not, we did not ask for this task
#check if the result file exists - if it does, we already calculated it
# check if the directory exists - if not, we did not ask for this task
# check if the result file exists - if it does, we already calculated it
if not os.path.exists(docking_directory) or not os.path.isdir(docking_directory) or os.path.exists(result_file):
logger.close()
return

#first update the status file
# first update the status file
status_file = os.path.join(docking_directory, "info.json")
status = _load_json(status_file)

logger.log(f"Task {taskId} started")
status["tasks"][taskId]["status"] = Status.RUNNING.value
_save_status_file(status_file, status, taskId)

#do the actual work here!
#first, look for the gz file with the structure
# first, look for the gz file with the structure
logger.log(f"Looking for structure file in {get_prediction_directory(docking_directory)}")
structure_file = ""
for file_path in glob.glob(os.path.join(get_prediction_directory(docking_directory), "public") + "/*.gz"):
structure_file = file_path
break

if structure_file == "":
#no structure file found, we cannot do anything
#this should not happen because the structure has to be downloadable for the prediction...
# no structure file found, we cannot do anything
# this should not happen because the structure has to be downloadable for the prediction...
logger.log(f"Task {taskId} failed, no structure file found")
logger.close()

status["tasks"][taskId]["status"] = Status.FAILED.value
_save_status_file(status_file, status, taskId)
return

#try to dock the molecule
logger.log(f"Structure file found: {structure_file}")

# try to dock the molecule
try:
logger.log(f"Running docking for task {taskId}")
prepare_docking(os.path.join(docking_directory, str(taskId), "input.json"), structure_file, os.path.join(docking_directory, str(taskId)))
run_docking(os.path.join(docking_directory, str(taskId), "docking_parameters.json"), os.path.join(docking_directory, str(taskId)), os.path.join(docking_directory, str(taskId)), "public")
except Exception as e:
print(repr(e))
print(str(e))
#something went wrong during the docking
#TODO: add some more error handling here, provide a log?
# something went wrong during the docking
logger.log(f"Task {taskId} failed, {str(e)}, {repr(e)}")
logger.close()

status["tasks"][taskId]["status"] = Status.FAILED.value
_save_status_file(status_file, status, taskId)
return

#parse the prediction file and do some calculations - in this case just counting the number of residues per pocket
#API is /docking/<database_name>/<prediction_name>/<hash>/public/<file_name>
#split docking_directory to get database_name and prediction_name
# parse the prediction file and do some calculations - in this case just counting the number of residues per pocket
# API is /docking/<database_name>/<prediction_name>/<hash>/public/<file_name>
# split docking_directory to get database_name and prediction_name
result = []
database_name = docking_directory.split("/")[4]
if "user-upload" in database_name:
Expand All @@ -157,21 +181,25 @@ def execute_directory_task(docking_directory: str, taskId: int):
})
result_json = json.dumps(result)

#save the result file (this directory should already exist, though...)
# save the result file (this directory should already exist, though...)
os.makedirs(os.path.join(docking_directory, str(taskId), "public"), exist_ok=True)

logger.log(f"Saving result file to {result_file}")
with open(result_file, "w", encoding="utf-8") as stream:
try:
stream.write(result_json)
finally:
stream.flush()

#update the status file, reload it first to make sure we don't overwrite any changes
# update the status file, reload it first to make sure we don't overwrite any changes
status = _load_json(status_file)

status["tasks"][taskId]["status"] = Status.SUCCESSFUL.value
_save_status_file(status_file, status, taskId)

logger.log(f"Task {taskId} successfully finished")
logger.close()

def main(arguments):
    """
    Placeholder entry point; intentionally does nothing yet.
    """
    pass

Expand Down
6 changes: 6 additions & 0 deletions frontend/build/webpack.common.js
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ module.exports = {
"privacy": clientDirectory("privacy", "privacy.js"),
"terms": clientDirectory("terms", "terms.js"),
"viewer": clientDirectory("viewer", "viewer.ts"),
"visualize": clientDirectory("visualize", "visualize.ts"),
},
"output": {
"path": path.join(__dirname, "..", "dist"),
Expand Down Expand Up @@ -109,6 +110,11 @@ module.exports = {
"template": clientDirectory("viewer", "viewer.html"),
"chunks": ["viewer"],
}),
new HtmlWebpackPlugin({
"filename": "visualize.html",
"template": clientDirectory("visualize", "visualize.html"),
"chunks": ["visualize"],
}),
new WebpackBar(),
]
};
Expand Down
20 changes: 18 additions & 2 deletions frontend/client/custom-types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -132,13 +132,22 @@ export enum PolymerColorType {
// One Mol* representation of the polymer, plus the Mol* state references
// needed to manipulate its transparency and overpaint.
export interface PolymerRepresentation {
    type: PolymerViewType; // which polymer view this representation corresponds to
    representation: StateObjectSelector; // the Mol* representation object
    transparentRepresentationRef: string | null; // transparency reference (presumably null until one is created — verify against usage)
    overpaintRef: string | null; // overpaint reference (presumably null until one is applied — verify against usage)
}

// One Mol* representation of a single pocket.
export interface PocketRepresentation {
    pocketId: string; // identifier of the pocket this representation belongs to
    type: PocketsViewType; // which pocket view this representation corresponds to
    representation: StateObjectSelector; // the Mol* representation object
    coloredPocket: boolean; // tracked for efficiency when overpainting
    selectionType: PocketSelectionType; // either a representation of atoms or whole residues
    overpaintRef: string | null; // overpaint reference
}

// Whether a pocket selection is expressed as individual atoms or whole residues.
export enum PocketSelectionType {
    Atoms = 0,
    Residues = 1
}

/**
Expand All @@ -164,6 +173,9 @@ export interface ReactApplicationState {
numUpdated: number,
tabIndex: number,
initialPocket: number;
pocketRepresentations: PocketRepresentation[];
polymerRepresentations: PolymerRepresentation[];
predictedPolymerRepresentations: PolymerRepresentation[];
}

/**
Expand Down Expand Up @@ -243,7 +255,7 @@ export interface ServerTaskInfo { // info about the task returned from the serve
id: string;
created: string;
lastChange: string;
status: string;
status: "queued" | "running" | "failed" | "successful";
initialData: {
hash: string; //hash of the data
pocket: string; //pocket id
Expand All @@ -262,12 +274,16 @@ export const ServerTaskTypeDescriptors = [ //descriptors for the ServerTaskType
"Molecular docking"
];

// Descriptors for the ServerTaskType visualization.
export const ServerTaskTypeVisualizationDescriptors = [
    "docking"
];

// A task tracked on the server (e.g. a docking computation).
export interface ServerTask {
    name: string;
    params: string[];
    pocket: number; // pocket rank/number this task was started for
    created: string;
    // NOTE: deduplicated — the diff residue carried both the old `string`
    // declaration and this narrowed union; keep the union so it matches
    // ServerTaskInfo.status.
    status: "queued" | "running" | "failed" | "successful";
    type: ServerTaskType;
    responseData: any; // parsed result payload; shape depends on the task type — TODO confirm
}
Expand Down
18 changes: 9 additions & 9 deletions frontend/client/index/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -58,15 +58,6 @@ <h1 class="text-center">
Use original structure
</label>
</div>
<div class="form-check">
<input class="form-check-input" type="checkbox" id="conservation-pdb"
title="If checked, a model that exploits conservation will be used to classify protein binding sites."
checked="checked">
<label class="form-check-label" for="conservation-pdb">
Use
<a href="./help#conservation" target="_blank">conservation</a>
</label>
</div>
<div id="pdb-chains" style="display: none">
<input id="pdb-chains-store" style="display: none">
<div id="pdb-chains-label">
Expand All @@ -76,6 +67,15 @@ <h1 class="text-center">
<!-- Chain check boxes are here. -->
</div>
</div>
<div class="form-check">
<input class="form-check-input" type="checkbox" id="conservation-pdb"
title="If checked, a model that exploits conservation will be used to classify protein binding sites."
checked="checked">
<label class="form-check-label" for="conservation-pdb">
Use
<a href="./help#conservation" target="_blank">conservation</a>
</label>
</div>
</div>
<div id="input-user-file-block" style="display: none">
<div class="mb-3">
Expand Down
4 changes: 4 additions & 0 deletions frontend/client/index/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,9 @@ class View {

invalidatePdbChain() {
this.pdbChainsLabel.innerText = "Please insert valid PDB code first.";
this.pdbChainsLabel.className += " alert alert-warning";
this.pdbChainsContainer.innerHTML = "";
this.pdbChainsContainer.style.display = "none";
}

beginPdbChainLoading() {
Expand All @@ -182,7 +184,9 @@ class View {

endPdbChainLoading(chains, asChecked) {
this.pdbChainsLabel.innerText = "Chains:";
this.pdbChainsLabel.className = "form-check-inline";
this.pdbChainsContainer.innerHTML = "";
this.pdbChainsContainer.style.display = "inline-block";
chains.forEach(chain => this.pdbChainsContainer.appendChild(
this.createCheckBoxForChain(chain, asChecked)));
}
Expand Down
7 changes: 4 additions & 3 deletions frontend/client/prankweb-api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -65,8 +65,8 @@ export async function fetchPrediction(
* @param id The ID of the prediction.
* @returns An URL to the API endpoint for the prediction.
*/
export function getApiEndpoint(database: string, id: string) {
return `./api/v2/prediction/${database}/${id.toUpperCase()}`;
export function getApiEndpoint(database: string, id: string, taskType: string = "prediction") {
return `./api/v2/${taskType}/${database}/${id.toUpperCase()}`;
}

/**
Expand All @@ -93,5 +93,6 @@ export async function fetchPredictionLog(
* @returns An URL to the ZIP file.
*/
export function getApiDownloadUrl({ database, id }: PredictionInfo) {
    // Build on getApiEndpoint so the download URL always matches the API base
    // (deduplicated: the diff residue also carried the old hard-coded return).
    const baseUrl = getApiEndpoint(database, id);
    return `${baseUrl}/public/prankweb.zip`;
}
15 changes: 11 additions & 4 deletions frontend/client/tasks/server-docking-task.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,9 @@ export async function computeDockingTaskOnBackend(prediction: PredictionInfo, po

const hash = await dockingHash(pocket.rank, smiles, exhaustiveness);

await fetch(`./api/v2/docking/${prediction.database}/${prediction.id}/post`, {
const apiEndpoint = getApiEndpoint(prediction.database, prediction.id, "docking");

await fetch(`${apiEndpoint}/post`, {
method: 'POST',
headers: {
'Accept': 'application/json',
Expand Down Expand Up @@ -139,7 +141,8 @@ export async function downloadDockingResult(fileURL: string) {
* @returns null if no task has finished, otherwise the finished task
*/
export async function pollForDockingTask(predictionInfo: PredictionInfo) {
let taskStatusJSON = await fetch(`./api/v2/docking/${predictionInfo.database}/${predictionInfo.id}/tasks`, { cache: "no-store" })
const apiEndpoint = getApiEndpoint(predictionInfo.database, predictionInfo.id, "docking");
let taskStatusJSON = await fetch(`${apiEndpoint}/tasks`, { cache: "no-store" })
.then(res => res.json())
.catch(err => {
return;
Expand All @@ -152,6 +155,8 @@ export async function pollForDockingTask(predictionInfo: PredictionInfo) {
const tasks: ServerTaskLocalStorageData[] = JSON.parse(savedTasks);
if (tasks.length === 0) return;
if (tasks.every((task: ServerTaskLocalStorageData) => task.status === "successful" || task.status === "failed")) return;
// get the count of "queued" tasks
const queuedTasks = taskStatusJSON["tasks"].filter((t: ServerTaskInfo) => t.status === "queued" || t.status === "running").length;
tasks.forEach(async (task: ServerTaskLocalStorageData, i: number) => {
if (task.status === "successful" || task.status === "failed") return;

Expand All @@ -161,14 +166,16 @@ export async function pollForDockingTask(predictionInfo: PredictionInfo) {
if (individualTask) {
if (individualTask.status !== task.status) {
//update the status
tasks[i].status = individualTask.status;
tasks[i].status = `${individualTask.status}`;

//download the computed data
if (individualTask.status === "successful") {
const hash = await dockingHash(task.pocket.toString(), individualTask.initialData.smiles, individualTask.initialData.exhaustiveness);
const data = await fetch(`./api/v2/docking/${predictionInfo.database}/${predictionInfo.id}/${hash}/public/result.json`)
const data = await fetch(`${apiEndpoint}/${hash}/public/result.json`)
.then(res => res.json()).catch(err => console.log(err));
tasks[i].responseData = data;
} else if (individualTask.status === "queued") {
tasks[i].status += ` (${queuedTasks} in queue)`;
}

//save the updated tasks
Expand Down
2 changes: 0 additions & 2 deletions frontend/client/viewer/application.css
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,6 @@
}

#application-rcsb {
position: relative;
overflow-x: auto;
overflow-y: auto;
height: calc(30vh - 68px);
margin-bottom: 10px;
Expand Down
Loading

0 comments on commit d0a9ab1

Please sign in to comment.