
Commit

other naming changes
jcblemai committed Sep 12, 2023
1 parent 4d5a6ef commit d92ec18
Showing 16 changed files with 92 additions and 92 deletions.
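In substance, this commit renames the spatial container class setup.SpatialSetup to setup.SubpopulationStructure and the Setup attribute that holds it from spatset to subpop_struct; the constructor keywords and the members read from it (subpop_names, nnodes, popnodes, mobility) are unchanged. A minimal before/after sketch in Python (illustrative only: the setup_name, file paths, and key values below are placeholders, not taken from this repository):

    from gempyor import setup

    # New name introduced by this commit (previously setup.SpatialSetup).
    # All argument values here are placeholders for illustration.
    subpop_struct = setup.SubpopulationStructure(
        setup_name="example",
        geodata_file="data/geodata.csv",
        mobility_file="data/mobility.txt",
        popnodes_key="population",
        subpop_names_key="subpop",
    )

    # Call sites that read the structure off a Setup object change accordingly:
    #   s.spatset.subpop_names  ->  s.subpop_struct.subpop_names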
4 changes: 2 additions & 2 deletions flepimop/gempyor_pkg/docs/integration_benchmark.ipynb
@@ -200,7 +200,7 @@
"\n",
"s = setup.Setup(\n",
" setup_name=config[\"name\"].get() + \"_\" + str(npi_scenario),\n",
" spatial_setup=setup.SpatialSetup(\n",
" spatial_setup=setup.SubpopulationStructure(\n",
" setup_name=config[\"setup_name\"].get(),\n",
" geodata_file=spatial_base_path / spatial_config[\"geodata\"].get(),\n",
" mobility_file=spatial_base_path / spatial_config[\"mobility\"].get(),\n",
@@ -444,7 +444,7 @@
" npi = NPI.NPIBase.execute(\n",
" npi_config=s.npi_config,\n",
" global_config=config,\n",
" subpop=s.spatset.subpop,\n",
" subpop=s.subpop_struct.subpop,\n",
" pnames_overlap_operation_sum=s.parameters.intervention_overlap_operation[\"sum\"],\n",
" )\n",
"\n",
4 changes: 2 additions & 2 deletions flepimop/gempyor_pkg/src/gempyor/dev/dev_seir.py
@@ -20,7 +20,7 @@
config.read(user=False)
config.set_file(f"{DATA_DIR}/config.yml")

ss = setup.SpatialSetup(
ss = setup.SubpopulationStructure(
setup_name="test_seir",
geodata_file=f"{DATA_DIR}/geodata.csv",
mobility_file=f"{DATA_DIR}/mobility.txt",
@@ -58,7 +58,7 @@
mobility_data_indices = s.mobility.indptr
mobility_data = s.mobility.data

npi = NPI.NPIBase.execute(npi_config=s.npi_config_seir, global_config=config, subpops=s.spatset.subpop_names)
npi = NPI.NPIBase.execute(npi_config=s.npi_config_seir, global_config=config, subpops=s.subpop_struct.subpop_names)

params = s.parameters.parameters_quick_draw(s.n_days, s.nnodes)
params = s.parameters.parameters_reduce(params, npi)
6 changes: 3 additions & 3 deletions flepimop/gempyor_pkg/src/gempyor/interface.py
@@ -80,7 +80,7 @@ def __init__(
write_parquet = True
self.s = setup.Setup(
setup_name=config["name"].get() + "_" + str(npi_scenario),
spatial_setup=setup.SpatialSetup(
spatial_setup=setup.SubpopulationStructure(
setup_name=config["setup_name"].get(),
geodata_file=spatial_base_path / spatial_config["geodata"].get(),
mobility_file=spatial_base_path / spatial_config["mobility"].get()
@@ -118,7 +118,7 @@ def __init__(
f""" gempyor >> prefix: {in_prefix};""" # ti: {s.ti}; tf: {s.tf};
)

self.already_built = False # whether we have already build the costly object we just build once.
self.already_built = False # whether we have already built the costly objects that only need to be built once.

def update_prefix(self, new_prefix, new_out_prefix=None):
self.s.in_prefix = new_prefix
@@ -374,7 +374,7 @@ def get_seir_parameter_reduced(
parameters = self.s.parameters.parameters_reduce(p_draw, npi_seir)

full_df = pd.DataFrame()
for i, subpop in enumerate(self.s.spatset.subpop_names):
for i, subpop in enumerate(self.s.subpop_struct.subpop_names):
a = pd.DataFrame(
parameters[:, :, i].T,
columns=self.s.parameters.pnames,
58 changes: 29 additions & 29 deletions flepimop/gempyor_pkg/src/gempyor/outcomes.py
@@ -72,14 +72,14 @@ def build_npi_Outcomes(
npi = NPI.NPIBase.execute(
npi_config=s.npi_config_outcomes,
global_config=config,
subpops=s.spatset.subpop_names,
subpops=s.subpop_struct.subpop_names,
loaded_df=loaded_df,
)
else:
npi = NPI.NPIBase.execute(
npi_config=s.npi_config_outcomes,
global_config=config,
subpops=s.spatset.subpop_names,
subpops=s.subpop_struct.subpop_names,
)
return npi

@@ -135,14 +135,14 @@ def read_parameters_from_config(s: setup.Setup):
"",
end="",
)
branching_data = branching_data[branching_data["subpop"].isin(s.spatset.subpop_names)]
branching_data = branching_data[branching_data["subpop"].isin(s.subpop_struct.subpop_names)]
print(
"Intersect with seir simulation: ",
len(branching_data.subpop.unique()),
"kept",
)

if len(branching_data.subpop.unique()) != len(s.spatset.subpop_names):
if len(branching_data.subpop.unique()) != len(s.subpop_struct.subpop_names):
raise ValueError(
f"Places in seir input files does not correspond to subpops in outcome probability file {branching_file}"
)
@@ -230,7 +230,7 @@ def read_parameters_from_config(s: setup.Setup):
logging.debug(f"Using 'param_from_file' for relative probability in outcome {class_name}")
# Sort it in case the relative probability file is misspecified
rel_probability.subpop = rel_probability.subpop.astype("category")
rel_probability.subpop = rel_probability.subpop.cat.set_categories(s.spatset.subpop_names)
rel_probability.subpop = rel_probability.subpop.cat.set_categories(s.subpop_struct.subpop_names)
rel_probability = rel_probability.sort_values(["subpop"])
parameters[class_name]["rel_probability"] = rel_probability["value"].to_numpy()
else:
@@ -305,8 +305,8 @@ def compute_all_multioutcomes(*, s, sim_id2write, parameters, loaded_values=None
dates = pd.date_range(s.ti, s.tf, freq="D")

outcomes = dataframe_from_array(
np.zeros((len(dates), len(s.spatset.subpop_names)), dtype=int),
s.spatset.subpop_names,
np.zeros((len(dates), len(s.subpop_struct.subpop_names)), dtype=int),
s.subpop_struct.subpop_names,
dates,
"zeros",
).drop("zeros", axis=1)
@@ -323,16 +323,16 @@
source_array = get_filtered_incidI(
seir_sim,
dates,
s.spatset.subpop_names,
s.subpop_struct.subpop_names,
{"incidence": {"infection_stage": "I1"}},
)
all_data["incidI"] = source_array
outcomes = pd.merge(
outcomes,
dataframe_from_array(source_array, s.spatset.subpop_names, dates, "incidI"),
dataframe_from_array(source_array, s.subpop_struct.subpop_names, dates, "incidI"),
)
elif isinstance(source_name, dict):
source_array = get_filtered_incidI(seir_sim, dates, s.spatset.subpop_names, source_name)
source_array = get_filtered_incidI(seir_sim, dates, s.subpop_struct.subpop_names, source_name)
# we don't keep the source in this case
else: # already defined outcomes
source_array = all_data[source_name]
@@ -347,13 +347,13 @@
].to_numpy()
else:
probabilities = parameters[new_comp]["probability"].as_random_distribution()(
size=len(s.spatset.subpop_names)
size=len(s.subpop_struct.subpop_names)
) # one draw per subpop
if "rel_probability" in parameters[new_comp]:
probabilities = probabilities * parameters[new_comp]["rel_probability"]

delays = parameters[new_comp]["delay"].as_random_distribution()(
size=len(s.spatset.subpop_names)
size=len(s.subpop_struct.subpop_names)
) # one draw per subpop
probabilities[probabilities > 1] = 1
probabilities[probabilities < 0] = 0
@@ -366,18 +366,18 @@
hpar,
pd.DataFrame.from_dict(
{
"subpop": s.spatset.subpop_names,
"quantity": ["probability"] * len(s.spatset.subpop_names),
"outcome": [new_comp] * len(s.spatset.subpop_names),
"value": probabilities[0] * np.ones(len(s.spatset.subpop_names)),
"subpop": s.subpop_struct.subpop_names,
"quantity": ["probability"] * len(s.subpop_struct.subpop_names),
"outcome": [new_comp] * len(s.subpop_struct.subpop_names),
"value": probabilities[0] * np.ones(len(s.subpop_struct.subpop_names)),
}
),
pd.DataFrame.from_dict(
{
"subpop": s.spatset.subpop_names,
"quantity": ["delay"] * len(s.spatset.subpop_names),
"outcome": [new_comp] * len(s.spatset.subpop_names),
"value": delays[0] * np.ones(len(s.spatset.subpop_names)),
"subpop": s.subpop_struct.subpop_names,
"quantity": ["delay"] * len(s.subpop_struct.subpop_names),
"outcome": [new_comp] * len(s.subpop_struct.subpop_names),
"value": delays[0] * np.ones(len(s.subpop_struct.subpop_names)),
}
),
],
@@ -407,7 +407,7 @@ def compute_all_multioutcomes(*, s, sim_id2write, parameters, loaded_values=None
stoch_delay_flag = False
all_data[new_comp] = multishift(all_data[new_comp], delays, stoch_delay_flag=stoch_delay_flag)
# Produce a dataframe and merge it
df_p = dataframe_from_array(all_data[new_comp], s.spatset.subpop_names, dates, new_comp)
df_p = dataframe_from_array(all_data[new_comp], s.subpop_struct.subpop_names, dates, new_comp)
outcomes = pd.merge(outcomes, df_p)

# Make duration
@@ -418,7 +418,7 @@ def compute_all_multioutcomes(*, s, sim_id2write, parameters, loaded_values=None
]["value"].to_numpy()
else:
durations = parameters[new_comp]["duration"].as_random_distribution()(
size=len(s.spatset.subpop_names)
size=len(s.subpop_struct.subpop_names)
) # one draw per subpop
durations = np.repeat(durations[:, np.newaxis], len(dates), axis=1).T # duplicate in time
durations = np.round(durations).astype(int)
@@ -428,10 +428,10 @@ def compute_all_multioutcomes(*, s, sim_id2write, parameters, loaded_values=None
hpar,
pd.DataFrame.from_dict(
{
"subpop": s.spatset.subpop_names,
"quantity": ["duration"] * len(s.spatset.subpop_names),
"outcome": [new_comp] * len(s.spatset.subpop_names),
"value": durations[0] * np.ones(len(s.spatset.subpop_names)),
"subpop": s.subpop_struct.subpop_names,
"quantity": ["duration"] * len(s.subpop_struct.subpop_names),
"outcome": [new_comp] * len(s.subpop_struct.subpop_names),
"value": durations[0] * np.ones(len(s.subpop_struct.subpop_names)),
}
),
],
@@ -465,22 +465,22 @@ def compute_all_multioutcomes(*, s, sim_id2write, parameters, loaded_values=None

df_p = dataframe_from_array(
all_data[parameters[new_comp]["duration_name"]],
s.spatset.subpop_names,
s.subpop_struct.subpop_names,
dates,
parameters[new_comp]["duration_name"],
)
outcomes = pd.merge(outcomes, df_p)

elif "sum" in parameters[new_comp]:
sum_outcome = np.zeros(
(len(dates), len(s.spatset.subpop_names)),
(len(dates), len(s.subpop_struct.subpop_names)),
dtype=all_data[parameters[new_comp]["sum"][0]].dtype,
)
# Sum all concerned compartments.
for cmp in parameters[new_comp]["sum"]:
sum_outcome += all_data[cmp]
all_data[new_comp] = sum_outcome
df_p = dataframe_from_array(sum_outcome, s.spatset.subpop_names, dates, new_comp)
df_p = dataframe_from_array(sum_outcome, s.subpop_struct.subpop_names, dates, new_comp)
outcomes = pd.merge(outcomes, df_p)

return outcomes, hpar
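Aside: compute_all_multioutcomes above repeatedly draws one value per subpopulation (size=len(s.subpop_struct.subpop_names)) and then tiles that vector across the date axis. A standalone NumPy sketch of that broadcast, using made-up sizes rather than gempyor objects:

    import numpy as np

    rng = np.random.default_rng(0)
    n_subpops, n_days = 4, 7  # made-up sizes for illustration

    # one draw per subpop (stand-in for a per-subpop distribution draw)
    durations = rng.uniform(3.0, 10.0, size=n_subpops)

    # duplicate in time, giving a (dates, subpops) array like the outcome frames
    durations = np.repeat(durations[:, np.newaxis], n_days, axis=1).T
    durations = np.round(durations).astype(int)

    assert durations.shape == (n_days, n_subpops)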
10 changes: 5 additions & 5 deletions flepimop/gempyor_pkg/src/gempyor/seeding_ic.py
@@ -35,7 +35,7 @@ def _DataFrame2NumbaDict(df, amounts, setup) -> nb.typed.Dict:
n_seeding_ignored_before = 0
n_seeding_ignored_after = 0
for idx, (row_index, row) in enumerate(df.iterrows()):
if row["subpop"] not in setup.spatset.subpop_names:
if row["subpop"] not in setup.subpop_struct.subpop_names:
raise ValueError(
f"Invalid subpop '{row['subpop']}' in row {row_index + 1} of seeding::lambda_file. Not found in geodata."
)
@@ -49,7 +49,7 @@ def _DataFrame2NumbaDict(df, amounts, setup) -> nb.typed.Dict:
destination_dict = {grp_name: row[f"destination_{grp_name}"] for grp_name in cmp_grp_names}
seeding_dict["seeding_sources"][idx] = setup.compartments.get_comp_idx(source_dict)
seeding_dict["seeding_destinations"][idx] = setup.compartments.get_comp_idx(destination_dict)
seeding_dict["seeding_subpops"][idx] = setup.spatset.subpop_names.index(row["subpop"])
seeding_dict["seeding_subpops"][idx] = setup.subpop_struct.subpop_names.index(row["subpop"])
seeding_amounts[idx] = amounts[idx]
else:
n_seeding_ignored_after += 1
@@ -113,7 +113,7 @@ def draw_ic(self, sim_id: int, setup) -> np.ndarray:
)

y0 = np.zeros((setup.compartments.compartments.shape[0], setup.nnodes))
for pl_idx, pl in enumerate(setup.spatset.subpop_names): #
for pl_idx, pl in enumerate(setup.subpop_struct.subpop_names): #
if pl in list(ic_df["subpop"]):
states_pl = ic_df[ic_df["subpop"] == pl]
for comp_idx, comp_name in setup.compartments.compartments["name"].items():
@@ -204,7 +204,7 @@ def draw_ic(self, sim_id: int, setup) -> np.ndarray:
f"WARNING: init file mc_name {ic_df_compartment['mc_name'].iloc[0]} does not match compartment mc_name {comp_name}"
)

for pl_idx, pl in enumerate(setup.spatset.subpop_names):
for pl_idx, pl in enumerate(setup.subpop_struct.subpop_names):
if pl in ic_df.columns:
y0[comp_idx, pl_idx] = float(ic_df_compartment[pl])
elif allow_missing_nodes:
@@ -237,7 +237,7 @@ def draw_ic(self, sim_id: int, setup) -> np.ndarray:

# check that the inputted values sum to the node_population:
error = False
for pl_idx, pl in enumerate(setup.spatset.subpop_names):
for pl_idx, pl in enumerate(setup.subpop_struct.subpop_names):
n_y0 = y0[:, pl_idx].sum()
n_pop = setup.popnodes[pl_idx]
if abs(n_y0 - n_pop) > 1:
8 changes: 4 additions & 4 deletions flepimop/gempyor_pkg/src/gempyor/seir.py
@@ -171,15 +171,15 @@ def build_npi_SEIR(s, load_ID, sim_id2load, config, bypass_DF=None, bypass_FN=No
npi = NPI.NPIBase.execute(
npi_config=s.npi_config_seir,
global_config=config,
subpops=s.spatset.subpop_names,
subpops=s.subpop_struct.subpop_names,
loaded_df=loaded_df,
pnames_overlap_operation_sum=s.parameters.intervention_overlap_operation["sum"],
)
else:
npi = NPI.NPIBase.execute(
npi_config=s.npi_config_seir,
global_config=config,
subpops=s.spatset.subpop_names,
subpops=s.subpop_struct.subpop_names,
pnames_overlap_operation_sum=s.parameters.intervention_overlap_operation["sum"],
)
return npi
@@ -293,7 +293,7 @@ def states2Df(s, states):
prev_df = pd.DataFrame(
data=states_prev.reshape(s.n_days * s.compartments.get_ncomp(), s.nnodes),
index=ts_index,
columns=s.spatset.subpop_names,
columns=s.subpop_struct.subpop_names,
).reset_index()
prev_df = pd.merge(
left=s.compartments.get_compartments_explicitDF(),
@@ -311,7 +311,7 @@ def states2Df(s, states):
incid_df = pd.DataFrame(
data=states_incid.reshape(s.n_days * s.compartments.get_ncomp(), s.nnodes),
index=ts_index,
columns=s.spatset.subpop_names,
columns=s.subpop_struct.subpop_names,
).reset_index()
incid_df = pd.merge(
left=s.compartments.get_compartments_explicitDF(),
12 changes: 6 additions & 6 deletions flepimop/gempyor_pkg/src/gempyor/setup.py
@@ -74,11 +74,11 @@ def __init__(
self.first_sim_index = first_sim_index
self.outcome_scenario = outcome_scenario

self.spatset = spatial_setup
self.subpop_struct = spatial_setup
self.n_days = (self.tf - self.ti).days + 1 # because we include s.ti and s.tf
self.nnodes = self.spatset.nnodes
self.popnodes = self.spatset.popnodes
self.mobility = self.spatset.mobility
self.nnodes = self.subpop_struct.nnodes
self.popnodes = self.subpop_struct.popnodes
self.mobility = self.subpop_struct.mobility

self.stoch_traj_flag = stoch_traj_flag

@@ -117,7 +117,7 @@ def __init__(
parameter_config=self.parameters_config,
ti=self.ti,
tf=self.tf,
subpop_names=self.spatset.subpop_names,
subpop_names=self.subpop_struct.subpop_names,
)
self.seedingAndIC = seeding_ic.SeedingAndIC(
seeding_config=self.seeding_config,
@@ -240,7 +240,7 @@ def write_simID(
return fname


class SpatialSetup:
class SubpopulationStructure:
def __init__(self, *, setup_name, geodata_file, mobility_file, popnodes_key, subpop_names_key):
self.setup_name = setup_name
self.data = pd.read_csv(
Expand Down
2 changes: 1 addition & 1 deletion flepimop/gempyor_pkg/src/gempyor/simulate_outcome.py
@@ -197,7 +197,7 @@ def simulate(
nslots = config["nslots"].as_number()
print(f"Simulations to be run: {nslots}")

spatial_setup = setup.SpatialSetup(
spatial_setup = setup.SubpopulationStructure(
setup_name=config["setup_name"].get(),
geodata_file=spatial_base_path / spatial_config["geodata"].get(),
mobility_file=spatial_base_path / spatial_config["mobility"].get()
2 changes: 1 addition & 1 deletion flepimop/gempyor_pkg/src/gempyor/simulate_seir.py
@@ -249,7 +249,7 @@ def simulate(
if not nslots:
nslots = config["nslots"].as_number()

spatial_setup = setup.SpatialSetup(
spatial_setup = setup.SubpopulationStructure(
setup_name=config["setup_name"].get(),
geodata_file=spatial_base_path / spatial_config["geodata"].get(),
mobility_file=spatial_base_path / spatial_config["mobility"].get()
2 changes: 1 addition & 1 deletion flepimop/gempyor_pkg/tests/npi/test_npis.py
@@ -161,7 +161,7 @@ def test_spatial_groups():
# all the same: r2
df = npi_df[npi_df["npi_name"] == "all_together"]
assert len(df) == 1
assert set(df["subpop"].iloc[0].split(",")) == set(inference_simulator.s.spatset.subpop_names)
assert set(df["subpop"].iloc[0].split(",")) == set(inference_simulator.s.subpop_struct.subpop_names)
assert len(df["subpop"].iloc[0].split(",")) == inference_simulator.s.nnodes

# two groups: r3
2 changes: 1 addition & 1 deletion flepimop/gempyor_pkg/tests/seir/test_compartments.py
@@ -65,7 +65,7 @@ def test_Setup_has_compartments_component():
config.read(user=False)
config.set_file(f"{DATA_DIR}/config.yml")

ss = setup.SpatialSetup(
ss = setup.SubpopulationStructure(
setup_name="test_values",
geodata_file=f"{DATA_DIR}/geodata.csv",
mobility_file=f"{DATA_DIR}/mobility.txt",