modified examples
schwemro committed Aug 13, 2024
1 parent 5e2b82a commit 93eb122
Showing 5 changed files with 652 additions and 103 deletions.
10 changes: 5 additions & 5 deletions examples/catchment_scale/StressRes/Moehlin/oneD/make_figures.py
@@ -252,14 +252,14 @@ def xr_annual_avg(ds, var):
fig.savefig(file, dpi=300)
plt.close(fig)

-states_hm_file = base_path_output / "ONED_Moehlin_total.nc"
+states_hm_file = base_path_output / "raster" / "ONED_Moehlin_total.nc"
ds_sim1 = xr.open_dataset(states_hm_file, engine="h5netcdf")

-states_hm_file = base_path_output / "RoGeR_WBM_1D" / "total.nc"
-ds_sim2 = xr.open_dataset(states_hm_file, engine="h5netcdf")
+# states_hm_file = base_path_output / "RoGeR_WBM_1D" / "total.nc"
+# ds_sim2 = xr.open_dataset(states_hm_file, engine="h5netcdf")

cmap1 = mpl.colormaps.get_cmap('viridis_r').resampled(6)
-vars_sim = ["cpr_ss", "q_ss"]
+vars_sim = ["cpr_ss", "q_ss", "q_sub"]
for var_sim in vars_sim:
# mask = (ds_sim2[_dict_vars[var_sim]].values <= 0)
# vals2 = onp.where(mask, onp.nan, ds_sim2[_dict_vars[var_sim]].values)
@@ -276,7 +276,7 @@ def xr_annual_avg(ds, var):
fig.savefig(file, dpi=300)
plt.close(fig)

-vars_sim = ["cpr_ss", "q_ss"]
+vars_sim = ["cpr_ss", "q_ss", "q_sub"]
for var_sim in vars_sim:
# mask = (ds_sim2[_dict_vars[var_sim]].values <= 0)
# vals2 = onp.where(mask, onp.nan, ds_sim2[_dict_vars[var_sim]].values)
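
These hunks make three changes: the merged annual averages are now read from the new "raster" output subfolder, the second dataset (RoGeR_WBM_1D) is commented out, and "q_sub" is added to the plotted variables. For orientation, a self-contained sketch of what the resulting plotting loop could look like follows; the placeholder paths, the _dict_vars mapping, the output file names and the imshow-based plotting are illustrative assumptions, not the script's actual code.

from pathlib import Path

import matplotlib as mpl
import matplotlib.pyplot as plt
import xarray as xr

# Placeholder paths and variable-name mapping; adjust to the local setup.
base_path_output = Path("output/oneD")
base_path_figs = Path("figures")
base_path_figs.mkdir(exist_ok=True)
_dict_vars = {"cpr_ss": "cpr_ss", "q_ss": "q_ss", "q_sub": "q_sub"}

# merged average annual sums written by total_aggregation.py (mm/year)
ds_sim1 = xr.open_dataset(base_path_output / "raster" / "ONED_Moehlin_total.nc", engine="h5netcdf")
cmap1 = mpl.colormaps.get_cmap("viridis_r").resampled(6)

for var_sim in ["cpr_ss", "q_ss", "q_sub"]:
    vals1 = ds_sim1[_dict_vars[var_sim]].values  # 2D field (x, y)
    fig, ax = plt.subplots(figsize=(6, 4))
    im = ax.imshow(vals1.T, origin="lower", cmap=cmap1)
    fig.colorbar(im, ax=ax, label=f"{var_sim} [mm/year]")
    file = base_path_figs / f"{var_sim}_avg_annual_sum.png"
    fig.savefig(file, dpi=300)
    plt.close(fig)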
173 changes: 87 additions & 86 deletions examples/catchment_scale/StressRes/Moehlin/oneD/total_aggregation.py
@@ -41,100 +41,101 @@
if not os.path.exists(base_path_figs):
    os.mkdir(base_path_figs)

-# directory of output
-base_path_output = Path("/Volumes/LaCie/roger/examples/catchment_scale/StressRes/Moehlin") / "output" / "oneD" / "polygons"
-if not os.path.exists(base_path_output):
-    os.mkdir(base_path_output)
+for geometry in ["polygons", "raster"]:
+    # directory of output
+    base_path_output = Path("/Volumes/LaCie/roger/examples/catchment_scale/StressRes/Moehlin") / "output" / "oneD" / geometry
+    if not os.path.exists(base_path_output):
+        os.mkdir(base_path_output)

-# load configuration file
-file_config = base_path / "config.yml"
-with open(file_config, "r") as file:
-    config = yaml.safe_load(file)
+    # load configuration file
+    file_config = base_path / "config.yml"
+    with open(file_config, "r") as file:
+        config = yaml.safe_load(file)


-# load RoGeR model parameters
-params_hm_file = base_path / "parameters.nc"
-ds_params = xr.open_dataset(params_hm_file, engine="h5netcdf")
-dem = ds_params["dgm"].values
-mask = onp.isfinite(dem)
+    # load RoGeR model parameters
+    params_hm_file = base_path / "parameters.nc"
+    ds_params = xr.open_dataset(params_hm_file, engine="h5netcdf")
+    dem = ds_params["dgm"].values
+    mask = onp.isfinite(dem)

-# years to be aggregated
-years = onp.arange(2013, 2023, dtype=int)
-# variables to be aggregated
-vars_sim = ["prec", "aet", "pet", "q_ss", "q_sub", "cpr_ss", "q_hof"]
+    # years to be aggregated
+    years = onp.arange(2013, 2023, dtype=int)
+    # variables to be aggregated
+    vars_sim = ["prec", "aet", "pet", "q_ss", "q_sub", "cpr_ss", "q_hof"]

-# merge model output into single file
-states_agg_file = base_path_output / "ONED_Moehlin_total.nc"
-if not os.path.exists(states_agg_file):
-    with h5netcdf.File(states_agg_file, "w", decode_vlen_strings=False) as f:
-        f.attrs.update(
-            date_created=datetime.datetime.today().isoformat(),
-            title="Average of annual sums/means of RoGeR simulations for the Moehlin catchment, Germany (2013-2022)",
-            institution="University of Freiburg, Chair of Hydrology",
-            references="",
-            comment="First timestep (t=0) contains initial values. Simulations start are written from second timestep (t=1) to last timestep (t=N).",
-            model_structure="1D model with capillary rise",
-            roger_version=f"{roger.__version__}",
-        )
-        # collect dimensions
-        # set dimensions with a dictionary
-        dict_dim = {
-            "x": config['nx'],
-            "y": config['ny'],
-            "Year": len(years),
-        }
-        if not f.dimensions:
-            f.dimensions = dict_dim
-            v = f.create_variable("x", ("x",), float, compression="gzip", compression_opts=1)
-            v.attrs["long_name"] = "x"
-            v.attrs["units"] = "m"
-            v[:] = onp.arange(dict_dim["x"]) * 25
-            v = f.create_variable("y", ("y",), float, compression="gzip", compression_opts=1)
-            v.attrs["long_name"] = "y"
-            v.attrs["units"] = "m"
-            v[:] = onp.arange(dict_dim["y"]) * 25
-            v = f.create_variable("Year", ("Year",), int, compression="gzip", compression_opts=1)
-            v[:] = years
+    # merge model output into single file
+    states_agg_file = base_path_output / "ONED_Moehlin_total.nc"
+    if not os.path.exists(states_agg_file):
+        with h5netcdf.File(states_agg_file, "w", decode_vlen_strings=False) as f:
+            f.attrs.update(
+                date_created=datetime.datetime.today().isoformat(),
+                title="Average of annual sums/means of RoGeR simulations for the Moehlin catchment, Germany (2013-2022)",
+                institution="University of Freiburg, Chair of Hydrology",
+                references="",
+                comment="First timestep (t=0) contains initial values. Simulations start are written from second timestep (t=1) to last timestep (t=N).",
+                model_structure="1D model with capillary rise",
+                roger_version=f"{roger.__version__}",
+            )
+            # collect dimensions
+            # set dimensions with a dictionary
+            dict_dim = {
+                "x": config['nx'],
+                "y": config['ny'],
+                "Year": len(years),
+            }
+            if not f.dimensions:
+                f.dimensions = dict_dim
+                v = f.create_variable("x", ("x",), float, compression="gzip", compression_opts=1)
+                v.attrs["long_name"] = "x"
+                v.attrs["units"] = "m"
+                v[:] = onp.arange(dict_dim["x"]) * 25
+                v = f.create_variable("y", ("y",), float, compression="gzip", compression_opts=1)
+                v.attrs["long_name"] = "y"
+                v.attrs["units"] = "m"
+                v[:] = onp.arange(dict_dim["y"]) * 25
+                v = f.create_variable("Year", ("Year",), int, compression="gzip", compression_opts=1)
+                v[:] = years

-        # load hydrological simulations
-        states_hm_file = base_path_output / f"{config['identifier']}.nc"
-        ds_sim = xr.open_dataset(states_hm_file, engine="h5netcdf")
+            # load hydrological simulations
+            states_hm_file = base_path_output / f"{config['identifier']}.nc"
+            ds_sim = xr.open_dataset(states_hm_file, engine="h5netcdf")

-        # assign date
-        days = (ds_sim['Time'].values / onp.timedelta64(24 * 60 * 60, "s"))
-        date = num2date(
-            days,
-            units=f"days since {ds_sim['Time'].attrs['time_origin']}",
-            calendar="standard",
-            only_use_cftime_datetimes=False,
-        )
-        ds_sim = ds_sim.assign_coords(Time=("Time", date))
-        for var_sim in vars_sim:
-            v = f.create_variable(var_sim, ("x", "y"), float, compression="gzip", compression_opts=1)
-            vals = ds_sim[var_sim].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
-            v[:, :] = onp.where(mask, vals, onp.nan)
-            v.attrs.update(long_name=var_sim, units="mm/year")
+            # assign date
+            days = (ds_sim['Time'].values / onp.timedelta64(24 * 60 * 60, "s"))
+            date = num2date(
+                days,
+                units=f"days since {ds_sim['Time'].attrs['time_origin']}",
+                calendar="standard",
+                only_use_cftime_datetimes=False,
+            )
+            ds_sim = ds_sim.assign_coords(Time=("Time", date))
+            for var_sim in vars_sim:
+                v = f.create_variable(var_sim, ("x", "y"), float, compression="gzip", compression_opts=1)
+                vals = ds_sim[var_sim].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
+                v[:, :] = onp.where(mask, vals, onp.nan)
+                v.attrs.update(long_name=var_sim, units="mm/year")

v = f.create_variable("inf", ("x", "y"), float, compression="gzip", compression_opts=1)
vals1 = ds_sim["inf_mat_rz"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
vals2 = ds_sim["inf_mp_rz"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
vals3 = ds_sim["inf_sc_rz"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
vals4 = ds_sim["inf_ss"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
v[:, :] = onp.where(mask, vals1 + vals2 + vals3 + vals4, onp.nan)
v.attrs.update(long_name="inf", units="mm/year")
v = f.create_variable("inf", ("x", "y"), float, compression="gzip", compression_opts=1)
vals1 = ds_sim["inf_mat_rz"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
vals2 = ds_sim["inf_mp_rz"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
vals3 = ds_sim["inf_sc_rz"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
vals4 = ds_sim["inf_ss"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
v[:, :] = onp.where(mask, vals1 + vals2 + vals3 + vals4, onp.nan)
v.attrs.update(long_name="inf", units="mm/year")

v = f.create_variable("inf_mat", ("x", "y"), float, compression="gzip", compression_opts=1)
vals1 = ds_sim["inf_mat_rz"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
v[:, :] = onp.where(mask, vals1, onp.nan)
v.attrs.update(long_name="inf_mat", units="mm/year")
v = f.create_variable("inf_mat", ("x", "y"), float, compression="gzip", compression_opts=1)
vals1 = ds_sim["inf_mat_rz"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
v[:, :] = onp.where(mask, vals1, onp.nan)
v.attrs.update(long_name="inf_mat", units="mm/year")

v = f.create_variable("inf_mp", ("x", "y"), float, compression="gzip", compression_opts=1)
vals1 = ds_sim["inf_mp_rz"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
vals2 = ds_sim["inf_ss"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
v[:, :] = onp.where(mask, vals1 + vals2, onp.nan)
v.attrs.update(long_name="inf_mp", units="mm/year")
v = f.create_variable("inf_mp", ("x", "y"), float, compression="gzip", compression_opts=1)
vals1 = ds_sim["inf_mp_rz"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
vals2 = ds_sim["inf_ss"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
v[:, :] = onp.where(mask, vals1 + vals2, onp.nan)
v.attrs.update(long_name="inf_mp", units="mm/year")

v = f.create_variable("inf_sc", ("x", "y"), float, compression="gzip", compression_opts=1)
vals1 = ds_sim["inf_sc_rz"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
v[:, :] = onp.where(mask, vals1, onp.nan)
v.attrs.update(long_name="inf_sc", units="mm/year")
v = f.create_variable("inf_sc", ("x", "y"), float, compression="gzip", compression_opts=1)
vals1 = ds_sim["inf_sc_rz"].sel(Time=slice('2013', '2022')).resample(Time='YS').sum().mean(axis=-1)
v[:, :] = onp.where(mask, vals1, onp.nan)
v.attrs.update(long_name="inf_sc", units="mm/year")