Merge remote-tracking branch 'origin/ground_real' into ground_real
RobertJaro committed Aug 17, 2023
2 parents c592599 + 2c609de commit 9a0a706
Showing 3 changed files with 141 additions and 14 deletions.
README.md (85 additions, 0 deletions)
@@ -90,3 +90,88 @@ python -m pytest test
Dependencies are specified in the `pyproject.toml` file. Only add dependencies that the project actually requires, to avoid bloated environments. Add _universal_ dependencies (like PyTorch) in the `[project]` section.

Add optional dependencies for the different toolsets in `[project.optional-dependencies]`.
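These toolsets install via pip's extras syntax. A minimal sketch, assuming a toolset named `training` is defined in `[project.optional-dependencies]` (the extra name is illustrative):

```bash
# Editable install of the project plus one optional toolset.
# "training" is a hypothetical extra name; use the toolset names
# actually defined in pyproject.toml.
pip install -e ".[training]"
```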

## Set up training on ScanAI

### Log in to Scan

Download and install the [OpenVPN client](https://openvpn.net/client/).

Prepare the `fdl.ovpn` config file.

Start the OpenVPN client with the config file (Linux):
```bash
sudo openvpn --config fdl.ovpn
```

Then connect to the Scan machine over SSH (replace `<server-address>` with its actual address):
```bash
ssh fdl@<server-address>
```
Enter the password when prompted.

### Clone the git repo

Once logged in, the default username is `fdl`.

```bash
mkdir workspace
cd workspace
git clone https://github.com/FrontierDevelopmentLab/2023-europe-space-weather.git
```

### Download training data

Follow the commands in `scripts/run_icarus.sh`, but download the data to `~/mnt` instead of `/mnt`.

Note: the `fdl` user has no permission to modify `/mnt` directly.

```bash
mkdir -p ~/mnt/ground-data
gsutil -m cp -R gs://fdl23_europe_helio_onground/ground-data/data_fits ~/mnt/ground-data/
gsutil -m cp -R gs://fdl23_europe_helio_onground/ground-data/PSI ~/mnt/ground-data/
gsutil -m cp -R gs://fdl_space_weather_data/events/fdl_stereo_2014_02_prep.zip ~/mnt/ground-data/
```
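To sanity-check the transfer, list the local copies and their sizes; these are generic shell commands, nothing project-specific:

```bash
ls ~/mnt/ground-data        # expect data_fits, PSI and the STEREO zip
du -sh ~/mnt/ground-data/*  # rough size check
```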

### Screen session (optional)

Start a `screen` session before running a Docker container so that long-running jobs survive SSH disconnects.
```bash
screen
```
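If the SSH connection drops, the session keeps running. Detach manually with `Ctrl-a d`; to get back in later:

```bash
screen -ls  # list running sessions
screen -r   # reattach to the detached session
```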

### Docker

Pull the [PyTorch Docker image](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/pytorch) from NVIDIA NGC.
```bash
docker pull nvcr.io/nvidia/pytorch:22.01-py3
```

Run a Docker container from the image above, mounting the repository and the data directory as volumes.
```bash
docker run -v /home/fdl/workspace/2023-europe-space-weather/:/workspace/2023-europe-space-weather/ -v /home/fdl/mnt:/mnt --gpus all -it --rm nvcr.io/nvidia/pytorch:22.01-py3
```
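Note that `--rm` removes the container on exit, so keep long-running work inside this session (or inside screen). If you need a second shell in the same running container, standard Docker commands cover it; a quick sketch:

```bash
docker ps                            # look up the running container's ID
docker exec -it <container-id> bash  # open another shell inside it
```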

Inside the Docker container, change into the mounted repository and install the requirements.
```bash
# requirements.txt is assumed to sit at the repository root
cd /workspace/2023-europe-space-weather
pip install -r requirements.txt
```

### Data prep

Unzip the downloaded STEREO archive from an interactive Python session:
```bash
python
```

```python
import zipfile

path_to_zip_file = "/mnt/ground-data/fdl_stereo_2014_02_prep.zip"
directory_to_extract_to = "/mnt/ground-data/data_fits_stereo_2014_02"

with zipfile.ZipFile(path_to_zip_file, 'r') as zip_ref:
    zip_ref.extractall(directory_to_extract_to)
```
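Equivalently, the standard library's `zipfile` command-line interface does this in one line from the shell, using the same paths as above:

```bash
# -e extracts the archive into the target directory
python -m zipfile -e /mnt/ground-data/fdl_stereo_2014_02_prep.zip /mnt/ground-data/data_fits_stereo_2014_02
```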

Follow the commands in `scripts/run_icarus.sh` to prep the data into `/mnt/prep-data`; an example is shown below.
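For instance, one of the 2-view prep commands from that script (copied as-is; adjust `--hao_path` and `--output_path` for other viewpoint sets):

```bash
python -m sunerf.prep.prep_hao --resolution 512 --hao_path "/mnt/ground-data/data_fits/dcmer_280W_bang_0000_pB/*.fits" --output_path /mnt/prep-data/prep_HAO_2view_background
```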
scripts/run_icarus.sh (15 additions, 1 deletion)
@@ -22,6 +22,13 @@ python -m sunerf.prep.prep_hao --resolution 512 --hao_path "/mnt/ground-data/dat
python -m sunerf.prep.prep_hao --resolution 512 --hao_path "/mnt/ground-data/data_fits/dcmer_280W_bang_0000_pB/*.fits" --output_path /mnt/prep-data/prep_HAO_2view_background
python -m sunerf.prep.prep_hao --resolution 512 --hao_path "/mnt/ground-data/data_fits/dcmer_280W_bang_0000_tB/*.fits" --output_path /mnt/prep-data/prep_HAO_2view_background

# Prep 5view
python -m sunerf.prep.prep_hao --resolution 512 --hao_path "/mnt/ground-data/data_fits/dcmer_340W_bang_0000_*B/*.fits" --output_path /mnt/prep-data/prep_HAO_5view --check_matching
python -m sunerf.prep.prep_hao --resolution 512 --hao_path "/mnt/ground-data/data_fits/dcmer_280W_bang_0000_*B/*.fits" --output_path /mnt/prep-data/prep_HAO_5view --check_matching
python -m sunerf.prep.prep_hao --resolution 512 --hao_path "/mnt/ground-data/data_fits/dcmer_220W_bang_0000_*B/*.fits" --output_path /mnt/prep-data/prep_HAO_5view --check_matching
python -m sunerf.prep.prep_hao --resolution 512 --hao_path "/mnt/ground-data/data_fits/dcmer_100W_bang_0000_*B/*.fits" --output_path /mnt/prep-data/prep_HAO_5view --check_matching
python -m sunerf.prep.prep_hao --resolution 512 --hao_path "/mnt/ground-data/data_fits/dcmer_040W_bang_0000_*B/*.fits" --output_path /mnt/prep-data/prep_HAO_5view --check_matching

python -m sunerf.prep.prep_hao --resolution 512 --hao_path "/mnt/ground-data/data_fits/*_bang_0000_*/stepnum_005.fits" --output_path /mnt/prep-data/prep_HAO_2view_background --check_matching

#####################################
@@ -31,7 +38,7 @@ python -m sunerf.prep.prep_hao --resolution 512 --hao_path "/mnt/ground-data/dat
#####################################

# Download all viewpoints first into data_fits subdirectory, then download all 005.step files as well.
- gsutil -m cp -R gs://fdl23_europe_helio_onground/ground-data/data_fits /mnt/ground-data/data_fits
+ gsutil -m cp -R gs://fdl23_europe_helio_onground/ground-data/data_fits /mnt/ground-data/

# Download 2 Viewpoints only into data_fits directory, emulating L5 and Earth (60° Diff) /mnt/ground-data/data_fits
gsutil -m cp -R gs://fdl23_europe_helio_onground/ground-data/data_fits/dcmer_340W_bang_0000_tB /mnt/ground-data/data_fits/
@@ -98,6 +105,10 @@ gsutil -m cp gs://fdl23_europe_helio_onground/ground-data/data_fits/dcmer_360W_

# Download all of the PSI data
gsutil -m cp -R gs://fdl23_europe_helio_onground/ground-data/PSI /mnt/ground-data/PSI/

# Download observational data
gsutil -m cp -R gs://fdl_space_weather_data/events/fdl_stereo_2014_02_prep.zip /mnt/ground-data/

################
# #
# Prep Data #
@@ -136,6 +147,9 @@ python -m sunerf.sunerf --wandb_name "hao_pinn_2view_background" --data_path_pB
# prep_HAO_allview
python -m sunerf.sunerf --wandb_name "hao_pinn_all" --data_path_pB "/mnt/prep-data/prep_HAO_allview/*pB*.fits" --data_path_tB "/mnt/prep-data/prep_HAO_allview/*tB*.fits" --path_to_save "/mnt/training/HAO_pinn_allview" --train "config/train.yaml" --hyperparameters "config/hyperparams_hao.yaml"

# Prep_HAO_5view
python -m sunerf.sunerf --wandb_name "hao_pinn_5view" --data_path_pB "/mnt/prep-data/prep_HAO_5view/*pB*.fits" --data_path_tB "/mnt/prep-data/prep_HAO_5view/*tB*.fits" --path_to_save "/mnt/training/HAO_pinn_5view" --train "config/train.yaml" --hyperparameters "config/hyperparams_hao.yaml"

# full training PSI
python -m sunerf.sunerf --wandb_name "psi" --data_path_pB "/mnt/prep-data/prep_PSI/pb_raw/*.fits" --data_path_tB "/mnt/prep-data/prep_PSI/b_raw/*.fits" --path_to_save "/mnt/training/PSI_v2" --train "config/train.yaml" --hyperparameters "config/hyperparams_psi.yaml"

sunerf/evaluation/density_cube_eval.py (41 additions, 13 deletions)
@@ -7,17 +7,34 @@
from sunpy.map import Map
from datetime import datetime
import pickle
import json

from sunerf.evaluation.loader import SuNeRFLoader
from sunerf.utilities.data_loader import normalize_datetime

- START_STEPNUM = 37 # 5
- END_STEPNUM = 37 # 74
+ '''
+ python -m sunerf.evaluation.density_cube_eval
+ '''
+
+ START_STEPNUM = 5
+ END_STEPNUM = 74
CHUNKS = 4

# R_SUN_CM = 6.957e+10
# GRID_SIZE = 500 / 16 # solar radii

ignore_half_of_r = True

# og-eda and og-eda-3
ckpt_dirname = "HAO_pinn_cr_2view_a26978f_heliographic_reformat"
# og-eda
# ckpt_dirname = "HAO_pinn_2view_no_physics"
# ckpt_dirname = "HAO_pinn_2view_cr"
# og-eda-3
# ckpt_dirname = "HAO_pinn_2view_cr3"
# ckpt_dirname = "HAO_pinn_1view_cr3"


def save_stepnum_to_datetime():
stepnum_to_datetime = dict()

@@ -45,23 +62,25 @@ def dtstr_to_datetime(dtstr):

mae_all_stepnums = []

- for stepnum in range(START_STEPNUM, END_STEPNUM + 1, 1):
+ for stepnum in tqdm(range(START_STEPNUM, END_STEPNUM + 1, 1)):

# load ground truth
gt_fname = "/mnt/ground-data/density_cube/dens_stepnum_%03d.sav" % stepnum
o = scipy.io.readsav(gt_fname)
ph = o['ph1d'] # (258,)
th = o['th1d'] # (128,)
- r = o['r1d'] # (256,)
- density_gt = o['dens'] # (258, 128, 256) (phi, theta, r)

- # ignore half of r
- # r_size = len(o['r1d'])
- # r = o['r1d'][:int(r_size / 2)] # (256,) -> (128, 0)
- # density_gt = o['dens'][:,:,:int(r_size / 2)] # (258, 128, 256) (phi, theta, r)
+ if ignore_half_of_r:
+     r_size = len(o['r1d'])
+     r = o['r1d'][:int(r_size / 2)]  # (256,) -> (128,)
+     density_gt = o['dens'][:, :, :int(r_size / 2)]  # (258, 128, 128) (phi, theta, r)
+ else:
+     r = o['r1d']  # (256,)
+     density_gt = o['dens']  # (258, 128, 256) (phi, theta, r)

# load model checkpoint
- base_path = '/mnt/training/HAO_pinn_cr_2view_a26978f_heliographic_reformat'
+ base_path = '/mnt/training/' + ckpt_dirname
chk_path = os.path.join(base_path, 'save_state.snf')
loader = SuNeRFLoader(chk_path, resolution=512)

@@ -100,8 +119,8 @@ def dtstr_to_datetime(dtstr):
# density *= GRID_SIZE ** (-2) * R_SUN_CM ** (-3)

# compare density to ground truth
- rel_density = density / np.mean(density)
- rel_density_gt = density_gt / np.mean(density_gt)
+ rel_density = density / np.median(density)
+ rel_density_gt = density_gt / np.median(density_gt)

print(rel_density[0])
print(rel_density_gt[0])
@@ -110,4 +129,13 @@ def dtstr_to_datetime(dtstr):

mae_all_stepnums.append(mae)

- print(sum(mae_all_stepnums) / len(mae_all_stepnums))
+ print(mae_all_stepnums)
+ mae_avg = sum(mae_all_stepnums) / len(mae_all_stepnums)
+ print(mae_avg)


# save eval to json
output_fname = "eval_half.json" if ignore_half_of_r else "eval.json"
eval_dict = {"mae_all_stepnums": mae_all_stepnums, "mae_avg": mae_avg}
with open(os.path.join(base_path, output_fname), 'w') as fp:
    json.dump(eval_dict, fp)
