-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
0 parents
commit 25797ad
Showing
25 changed files
with
11,205 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,9 @@ | ||
# Python | ||
__pycache__ | ||
*.pyc | ||
|
||
|
||
# Modeling | ||
_data | ||
_models | ||
_logs |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,49 @@ | ||
# Bayesian Uncertainty for Quality Assessment of Deep Learning Contours | ||
This repository contains Tensorflow2.4 code for the paper(s) | ||
- Comparing Bayesian Models for Organ Contouring in Head and Neck Radiotherapy | ||
|
||
|
||
## Installation | ||
1. Install [Anaconda](https://docs.anaconda.com/anaconda/install/) with python3.7 | ||
2. Install [git](https://git-scm.com/downloads) | ||
3. Open a terminal and follow the commands | ||
- Clone this repository | ||
- `git clone [email protected]:prerakmody/hansegmentation-uncertainty-qa.git` | ||
- Create conda env | ||
- (Specifically For Windows): `conda init powershell` (and restart the terminal) | ||
- (For all platforms) | ||
``` | ||
cd hansegmentation-uncertainty-qa | ||
conda deactivate | ||
conda create --name hansegmentation-uncertainty-qa python=3.8 | ||
conda activate hansegmentation-uncertainty-qa | ||
conda develop . # check for conda.pth file in $ANACONDA_HOME/envs/hansegmentation-uncertainty-qa/lib/python3.8/site-packages | ||
``` | ||
- Install packages | ||
- Tensorflow (check [here](https://www.tensorflow.org/install/source#tested_build_configurations) for CUDA/cuDNN requirements) | ||
- (stick to the exact commands) | ||
- For tensorflow2.4 | ||
``` | ||
conda install -c nvidia cudnn=8.0.0=cuda11.0_0 | ||
pip install tensorflow==2.4 | ||
``` | ||
- Check tensorflow installation | ||
``` | ||
python -c "import tensorflow as tf;print('\n\n\n====================== \n GPU Devices: ',tf.config.list_physical_devices('GPU'), '\n======================')" | ||
python -c "import tensorflow as tf;print('\n\n\n====================== \n', tf.reduce_sum(tf.random.normal([1000, 1000])), '\n======================' )" | ||
``` | ||
- [unix] upon running either of the above commands, you will see tensorflow searching for library files like libcudart.so, libcublas.so, libcublasLt.so, libcufft.so, libcurand.so, libcusolver.so, libcusparse.so, libcudnn.so in the location `$ANACONDA_HOME/envs/hansegmentation-uncertainty-qa/lib/` | ||
- [windows] upon running either of the above commands, you will see tensorflow searching for library files like cudart64_110.dll ... and so on in the location `$ANACONDA_HOME\envs\hansegmentation-uncertainty-qa\Library\bin` | ||
- Other tensorflow packages | ||
``` | ||
pip install tensorflow-probability==0.12.1 tensorflow-addons==0.12.1 | ||
``` | ||
- Other packages | ||
``` | ||
pip install scipy seaborn tqdm psutil humanize pynrrd pydicom SimpleITK itk scikit-image | ||
pip install psutil humanize pynvml | ||
``` | ||
# Notes | ||
- All the `src/train{}.py` files are the ones used to train the models as shown in the `demo/` folder |
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,105 @@ | ||
# Import private libraries
import src.config as config
from src.model.trainer import Trainer, Validator

# Import public libraries
import os
import pdb
import traceback
import tensorflow as tf
from pathlib import Path


if __name__ == "__main__":

    # Experiment name under which checkpoints/logs/results are stored.
    exp_name = 'HansegmentationUncertaintyQA-Dropout-CE'

    # Dataloader options shared between the training and evaluation configs.
    data_dir = Path(config.MAIN_DIR).joinpath('medical_dataloader', '_data')
    resampled = True
    crop_init = True
    grid = True
    batch_size = 2

    # Dropout (Bayesian) variant of the FocusNet architecture.
    model = config.MODEL_FOCUSNET_DROPOUT

    # To train
    params = {
        'exp_name': exp_name,
        'random_seed': 42,
        'dataloader': {
            'data_dir': data_dir,
            'dir_type': [config.DATALOADER_MICCAI2015_TRAIN, config.DATALOADER_MICCAI2015_TRAIN_ADD],
            'resampled': resampled,
            'crop_init': crop_init,
            'grid': grid,
            'random_grid': True,
            'filter_grid': False,
            'centred_prob': 0.3,
            'batch_size': batch_size,
            'shuffle': 5,
            'prefetch_batch': 4,
            'parallel_calls': 3,
        },
        'model': {
            'name': model,
            'optimizer': config.OPTIMIZER_ADAM,
            'init_lr': 0.001,
            'fixed_lr': True,
            'epochs': 1500,
            'epochs_save': 50,
            'epochs_eval': 50,
            'epochs_viz': 500,
            'load_model': {
                'load': False, 'load_exp_name': None, 'load_epoch': -1, 'load_optimizer_lr': None,
            },
            'profiler': {
                'profile': False,
                'epochs': [2, 3],
                'steps_per_epoch': 60,
                'starting_step': 4,
            },
            'model_tboard': False,
        },
        'metrics': {
            'logging_tboard': True,
            # for full 3D volume
            'metrics_eval': {'Dice': config.LOSS_DICE},
            ## for smaller grid/patch
            'metrics_loss': {'CE': config.LOSS_CE},  # [config.LOSS_CE, config.LOSS_DICE]
            'loss_weighted': {'CE': True},
            'loss_mask': {'CE': True},
            'loss_combo': {'CE': 1.0},
        },
        'others': {
            'epochs_timer': 20,
            'epochs_memory': 5,
        },
    }

    # Call the trainer
    trainer = Trainer(params)
    trainer.train()

    # To evaluate on MICCAI2015
    # NOTE(review): this params dict is rebuilt for evaluation, but no Validator
    # call is visible in this chunk — confirm against the full file.
    params = {
        'exp_name': exp_name,
        'pid': os.getpid(),
        'dataloader': {
            'data_dir': data_dir,
            'resampled': resampled,
            'grid': grid,
            'crop_init': crop_init,
            'batch_size': batch_size,
            'prefetch_batch': 1,
            'dir_type': [config.DATALOADER_MICCAI2015_TEST],  # [config.DATALOADER_MICCAI2015_TESTONSITE]
            'eval_type': config.MODE_TEST,
        },
        'model': {
            'name': model,
            'load_epoch': 1000,
            'MC_RUNS': 30,
            'training_bool': True,  # [True=dropout-at-test-time, False=no-dropout-at-test-time]
        },
        'save': True,
    }
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,105 @@ | ||
# Import private libraries
import src.config as config
from src.model.trainer import Trainer, Validator

# Import public libraries
import os
import pdb
import traceback
import tensorflow as tf
from pathlib import Path


if __name__ == "__main__":

    # Experiment name under which checkpoints/logs/results are stored.
    exp_name = 'HansegmentationUncertaintyQA-Dropout-CEBasic'

    # Dataloader options shared between the training and evaluation configs.
    data_dir = Path(config.MAIN_DIR).joinpath('medical_dataloader', '_data')
    resampled = True
    crop_init = True
    grid = True
    batch_size = 2

    # Dropout (Bayesian) variant of the FocusNet architecture.
    model = config.MODEL_FOCUSNET_DROPOUT

    # To train
    params = {
        'exp_name': exp_name,
        'random_seed': 42,
        'dataloader': {
            'data_dir': data_dir,
            'dir_type': [config.DATALOADER_MICCAI2015_TRAIN, config.DATALOADER_MICCAI2015_TRAIN_ADD],
            'resampled': resampled,
            'crop_init': crop_init,
            'grid': grid,
            'random_grid': True,
            'filter_grid': False,
            'centred_prob': 0.3,
            'batch_size': batch_size,
            'shuffle': 5,
            'prefetch_batch': 4,
            'parallel_calls': 3,
        },
        'model': {
            'name': model,
            'optimizer': config.OPTIMIZER_ADAM,
            'init_lr': 0.001,
            'fixed_lr': True,
            'epochs': 1500,
            'epochs_save': 50,
            'epochs_eval': 50,
            'epochs_viz': 500,
            'load_model': {
                'load': False, 'load_exp_name': None, 'load_epoch': -1, 'load_optimizer_lr': None,
            },
            'profiler': {
                'profile': False,
                'epochs': [2, 3],
                'steps_per_epoch': 60,
                'starting_step': 4,
            },
            'model_tboard': False,
        },
        'metrics': {
            'logging_tboard': True,
            # for full 3D volume
            'metrics_eval': {'Dice': config.LOSS_DICE},
            ## for smaller grid/patch
            'metrics_loss': {'CE-Basic': config.LOSS_CE_BASIC},
            'loss_weighted': {'CE-Basic': True},
            'loss_mask': {'CE-Basic': True},
            'loss_combo': {'CE-Basic': 1.0},
        },
        'others': {
            'epochs_timer': 20,
            'epochs_memory': 5,
        },
    }

    # Call the trainer
    trainer = Trainer(params)
    trainer.train()

    # To evaluate on MICCAI2015
    # NOTE(review): this params dict is rebuilt for evaluation, but no Validator
    # call is visible in this chunk — confirm against the full file.
    params = {
        'exp_name': exp_name,
        'pid': os.getpid(),
        'dataloader': {
            'data_dir': data_dir,
            'resampled': resampled,
            'grid': grid,
            'crop_init': crop_init,
            'batch_size': batch_size,
            'prefetch_batch': 1,
            'dir_type': [config.DATALOADER_MICCAI2015_TEST],  # [config.DATALOADER_MICCAI2015_TESTONSITE]
            'eval_type': config.MODE_TEST,
        },
        'model': {
            'name': model,
            'load_epoch': 1000,
            'MC_RUNS': 30,
            'training_bool': True,  # [True=dropout-at-test-time, False=no-dropout-at-test-time]
        },
        'save': True,
    }
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,105 @@ | ||
# Import private libraries
import src.config as config
from src.model.trainer import Trainer, Validator

# Import public libraries
import os
import pdb
import traceback
import tensorflow as tf
from pathlib import Path


if __name__ == "__main__":

    # Experiment name under which checkpoints/logs/results are stored.
    exp_name = 'HansegmentationUncertaintyQA-Dropout-DICE'

    # Dataloader options shared between the training and evaluation configs.
    data_dir = Path(config.MAIN_DIR).joinpath('medical_dataloader', '_data')
    resampled = True
    crop_init = True
    grid = True
    batch_size = 2

    # Dropout (Bayesian) variant of the FocusNet architecture.
    model = config.MODEL_FOCUSNET_DROPOUT

    # To train
    params = {
        'exp_name': exp_name,
        'random_seed': 42,
        'dataloader': {
            'data_dir': data_dir,
            'dir_type': [config.DATALOADER_MICCAI2015_TRAIN, config.DATALOADER_MICCAI2015_TRAIN_ADD],
            'resampled': resampled,
            'crop_init': crop_init,
            'grid': grid,
            'random_grid': True,
            'filter_grid': False,
            'centred_prob': 0.3,
            'batch_size': batch_size,
            'shuffle': 5,
            'prefetch_batch': 4,
            'parallel_calls': 3,
        },
        'model': {
            'name': model,
            'optimizer': config.OPTIMIZER_ADAM,
            'init_lr': 0.001,
            'fixed_lr': True,
            'epochs': 1500,
            'epochs_save': 50,
            'epochs_eval': 50,
            'epochs_viz': 500,
            'load_model': {
                'load': False, 'load_exp_name': None, 'load_epoch': -1, 'load_optimizer_lr': None,
            },
            'profiler': {
                'profile': False,
                'epochs': [2, 3],
                'steps_per_epoch': 60,
                'starting_step': 4,
            },
            'model_tboard': False,
        },
        'metrics': {
            'logging_tboard': True,
            # for full 3D volume
            'metrics_eval': {'Dice': config.LOSS_DICE},
            ## for smaller grid/patch
            'metrics_loss': {'Dice': config.LOSS_DICE},
            'loss_weighted': {'Dice': True},
            'loss_mask': {'Dice': True},
            'loss_combo': {'Dice': 1.0},
        },
        'others': {
            'epochs_timer': 20,
            'epochs_memory': 5,
        },
    }

    # Call the trainer
    trainer = Trainer(params)
    trainer.train()

    # To evaluate on MICCAI2015
    # NOTE(review): this params dict is rebuilt for evaluation, but no Validator
    # call is visible in this chunk — confirm against the full file.
    params = {
        'exp_name': exp_name,
        'pid': os.getpid(),
        'dataloader': {
            'data_dir': data_dir,
            'resampled': resampled,
            'grid': grid,
            'crop_init': crop_init,
            'batch_size': batch_size,
            'prefetch_batch': 1,
            'dir_type': [config.DATALOADER_MICCAI2015_TEST],  # [config.DATALOADER_MICCAI2015_TESTONSITE]
            'eval_type': config.MODE_TEST,
        },
        'model': {
            'name': model,
            'load_epoch': 1000,
            'MC_RUNS': 30,
            'training_bool': True,  # [True=dropout-at-test-time, False=no-dropout-at-test-time]
        },
        'save': True,
    }
Oops, something went wrong.