Commit b4b3a2b

Add periods.
ioannis-vm committed Jun 22, 2024
1 parent 550fbb8 commit b4b3a2b
Showing 29 changed files with 412 additions and 154 deletions.
8 changes: 5 additions & 3 deletions .github/workflows/format_and_lint.yaml
@@ -19,15 +19,17 @@ jobs:
python -m pip install --upgrade pip
pip install black flake8
- name: Run Black
run: black -S .
uses: psf/black@stable
with:
options: "-S"
- name: Commit changes
run: |
git config --global user.name 'GitHubBot'
git config --global user.email '[email protected]'
git add .
if git diff-index --quiet HEAD --; then
echo "No changes to commit"
echo "No changes to commit."
else
git commit -m "Format code with Black"
git commit -m "Format code with Black."
fi
git push
8 changes: 6 additions & 2 deletions pelicun/base.py
@@ -433,7 +433,9 @@ def add_warning(self, msg: str) -> None:
msg_lines = msg.split('\n')
formatted_msg = '\n'
for msg_line in msg_lines:
formatted_msg += self.spaces + Fore.RED + msg_line + Style.RESET_ALL + '\n'
formatted_msg += (
self.spaces + Fore.RED + msg_line + Style.RESET_ALL + '\n'
)
if formatted_msg not in self.warning_stack:
self.warning_stack.append(formatted_msg)

@@ -893,7 +895,9 @@ def show_matrix(data, use_describe=False):
"""
if use_describe:
pp.pprint(pd.DataFrame(data).describe(percentiles=[0.01, 0.1, 0.5, 0.9, 0.99]))
pp.pprint(
pd.DataFrame(data).describe(percentiles=[0.01, 0.1, 0.5, 0.9, 0.99])
)
else:
pp.pprint(pd.DataFrame(data))

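The base.py hunks above only rewrap existing logic, but the per-line warning coloring is easy to miss in the diff. Below is a minimal standalone sketch of that pattern, assuming the Fore/Style objects come from colorama; the `indent` argument stands in for `self.spaces`, and `format_warning` is a hypothetical helper, not a pelicun API.

```python
# Sketch of the per-line coloring done in add_warning; `indent` stands in
# for self.spaces and format_warning is a hypothetical helper function.
from colorama import Fore, Style


def format_warning(msg: str, indent: str = '    ') -> str:
    formatted_msg = '\n'
    for msg_line in msg.split('\n'):
        # Color each line red and reset the style at the end of the line.
        formatted_msg += indent + Fore.RED + msg_line + Style.RESET_ALL + '\n'
    return formatted_msg


print(format_warning('Unknown unit in input file.\nFalling back to 1.00.'))
```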
111 changes: 75 additions & 36 deletions pelicun/db.py
@@ -413,7 +413,9 @@ def create_FEMA_P58_fragility_db(
ls_meta.update(
{
f"DS{ds_id}": {
"Description": cmp_meta[f"DS_{ds_id}_Description"],
"Description": cmp_meta[
f"DS_{ds_id}_Description"
],
"RepairAction": repair_action,
}
}
@@ -1022,7 +1024,9 @@ def create_FEMA_P58_repair_db(
time_vals[5] > 0
)

df_db.loc[(cmp.Index, 'Carbon'), f'DS{DS_i}-Family'] = family_hat_carbon
df_db.loc[(cmp.Index, 'Carbon'), f'DS{DS_i}-Family'] = (
family_hat_carbon
)

df_db.loc[(cmp.Index, 'Carbon'), f'DS{DS_i}-Theta_0'] = (
f"{carbon_theta[0]:g}"
@@ -1032,7 +1036,9 @@
f"{carbon_theta[1]:g}"
)

df_db.loc[(cmp.Index, 'Energy'), f'DS{DS_i}-Family'] = family_hat_energy
df_db.loc[(cmp.Index, 'Energy'), f'DS{DS_i}-Family'] = (
family_hat_energy
)

df_db.loc[(cmp.Index, 'Energy'), f'DS{DS_i}-Theta_0'] = (
f"{energy_theta[0]:g}"
@@ -1067,7 +1073,8 @@ def create_FEMA_P58_repair_db(
meta_data['DamageStates'].update(
{
f"DS{DS_i}": {
"Description": 'Combination of ' + ' & '.join(ds_combo),
"Description": 'Combination of '
+ ' & '.join(ds_combo),
"RepairAction": 'Combination of pure DS repair '
'actions.',
}
@@ -1080,9 +1087,9 @@
for DS_i in range(1, 6):
# cost
if not pd.isna(getattr(cmp, f'Best_Fit_DS{DS_i}')):
df_db.loc[(cmp.Index, 'Cost'), f'DS{DS_i}-Family'] = convert_family[
getattr(cmp, f'Best_Fit_DS{DS_i}')
]
df_db.loc[(cmp.Index, 'Cost'), f'DS{DS_i}-Family'] = (
convert_family[getattr(cmp, f'Best_Fit_DS{DS_i}')]
)

if not pd.isna(getattr(cmp, f'Lower_Qty_Mean_DS{DS_i}')):
theta_0_low = getattr(cmp, f'Lower_Qty_Mean_DS{DS_i}')
@@ -1091,7 +1098,9 @@
qnt_up = getattr(cmp, f'Upper_Qty_Cutoff_DS{DS_i}')

if theta_0_low == 0.0 and theta_0_up == 0.0:
df_db.loc[(cmp.Index, 'Cost'), f'DS{DS_i}-Family'] = np.nan
df_db.loc[(cmp.Index, 'Cost'), f'DS{DS_i}-Family'] = (
np.nan
)

else:
df_db.loc[(cmp.Index, 'Cost'), f'DS{DS_i}-Theta_0'] = (
@@ -1121,9 +1130,9 @@

# time
if not pd.isna(getattr(cmp, f'Best_Fit_DS{DS_i}_1')):
df_db.loc[(cmp.Index, 'Time'), f'DS{DS_i}-Family'] = convert_family[
getattr(cmp, f'Best_Fit_DS{DS_i}_1')
]
df_db.loc[(cmp.Index, 'Time'), f'DS{DS_i}-Family'] = (
convert_family[getattr(cmp, f'Best_Fit_DS{DS_i}_1')]
)

if not pd.isna(getattr(cmp, f'Lower_Qty_Mean_DS{DS_i}_1')):
theta_0_low = getattr(cmp, f'Lower_Qty_Mean_DS{DS_i}_1')
@@ -1132,7 +1141,9 @@
qnt_up = getattr(cmp, f'Upper_Qty_Cutoff_DS{DS_i}_1')

if theta_0_low == 0.0 and theta_0_up == 0.0:
df_db.loc[(cmp.Index, 'Time'), f'DS{DS_i}-Family'] = np.nan
df_db.loc[(cmp.Index, 'Time'), f'DS{DS_i}-Family'] = (
np.nan
)

else:
df_db.loc[(cmp.Index, 'Time'), f'DS{DS_i}-Theta_0'] = (
@@ -1144,8 +1155,8 @@
f"{getattr(cmp, f'CV__Dispersion_DS{DS_i}_2'):g}"
)

df_db.loc[(cmp.Index, 'Time'), f'DS{DS_i}-LongLeadTime'] = int(
getattr(cmp, f'DS_{DS_i}_Long_Lead_Time') == 'YES'
df_db.loc[(cmp.Index, 'Time'), f'DS{DS_i}-LongLeadTime'] = (
int(getattr(cmp, f'DS_{DS_i}_Long_Lead_Time') == 'YES')
)

else:
@@ -1279,9 +1290,13 @@ def create_Hazus_EQ_fragility_db(
frag_meta = {}

# prepare lists of labels for various building features
design_levels = list(raw_data['Structural_Fragility_Groups']['EDP_limits'].keys())
design_levels = list(
raw_data['Structural_Fragility_Groups']['EDP_limits'].keys()
)

building_types = list(raw_data['Structural_Fragility_Groups']['P_collapse'].keys())
building_types = list(
raw_data['Structural_Fragility_Groups']['P_collapse'].keys()
)

convert_design_level = {
'High_code': 'HC',
@@ -1383,7 +1398,9 @@ def create_Hazus_EQ_fragility_db(
"Description": (
frag_meta['Meta']['Collections']['STR']['Description']
+ ", "
+ frag_meta['Meta']['StructuralSystems'][st]['Description']
+ frag_meta['Meta']['StructuralSystems'][st][
'Description'
]
+ ", "
+ frag_meta['Meta']['HeightClasses'][hc]['Description']
+ ", "
@@ -1411,7 +1428,9 @@ def create_Hazus_EQ_fragility_db(
"Description": (
frag_meta['Meta']['Collections']['STR']['Description']
+ ", "
+ frag_meta['Meta']['StructuralSystems'][st]['Description']
+ frag_meta['Meta']['StructuralSystems'][st][
'Description'
]
+ ", "
+ frag_meta['Meta']['DesignLevels'][
convert_design_level[dl]
@@ -1435,12 +1454,12 @@ def create_Hazus_EQ_fragility_db(
ds_meta = frag_meta['Meta']['StructuralSystems'][st]['DamageStates']
for LS_i in range(1, 5):
df_db.loc[counter, f'LS{LS_i}-Family'] = 'lognormal'
df_db.loc[counter, f'LS{LS_i}-Theta_0'] = S_data['EDP_limits'][dl][
bt
][LS_i - 1]
df_db.loc[counter, f'LS{LS_i}-Theta_1'] = S_data['Fragility_beta'][
df_db.loc[counter, f'LS{LS_i}-Theta_0'] = S_data['EDP_limits'][
dl
]
][bt][LS_i - 1]
df_db.loc[counter, f'LS{LS_i}-Theta_1'] = S_data[
'Fragility_beta'
][dl]

if LS_i == 4:
p_coll = S_data['P_collapse'][bt]
@@ -1461,7 +1480,9 @@ def create_Hazus_EQ_fragility_db(
cmp_meta["LimitStates"].update(
{
f"LS{LS_i}": {
f"DS{LS_i}": {"Description": ds_meta[f"DS{LS_i}"]}
f"DS{LS_i}": {
"Description": ds_meta[f"DS{LS_i}"]
}
}
}
)
@@ -1537,7 +1558,9 @@ def create_Hazus_EQ_fragility_db(
"Comments": (
frag_meta['Meta']['Collections']['NSA']['Comment']
+ "\n"
+ frag_meta['Meta']['DesignLevels'][convert_design_level[dl]]['Comment']
+ frag_meta['Meta']['DesignLevels'][convert_design_level[dl]][
'Comment'
]
),
"SuggestedComponentBlockSize": "1 EA",
"RoundUpToIntegerQuantity": "True",
@@ -1599,7 +1622,9 @@ def create_Hazus_EQ_fragility_db(
'Description'
]
+ ", "
+ frag_meta['Meta']['HeightClasses'][hc]['Description']
+ frag_meta['Meta']['HeightClasses'][hc][
'Description'
]
+ ", "
+ frag_meta['Meta']['DesignLevels'][
convert_design_level[dl]
@@ -1608,7 +1633,9 @@ def create_Hazus_EQ_fragility_db(
"Comments": (
frag_meta['Meta']['Collections']['LF']['Comment']
+ "\n"
+ frag_meta['Meta']['StructuralSystems'][st]['Comment']
+ frag_meta['Meta']['StructuralSystems'][st][
'Comment'
]
+ "\n"
+ frag_meta['Meta']['HeightClasses'][hc]['Comment']
+ "\n"
@@ -1636,7 +1663,9 @@ def create_Hazus_EQ_fragility_db(
"Comments": (
frag_meta['Meta']['Collections']['LF']['Comment']
+ "\n"
+ frag_meta['Meta']['StructuralSystems'][st]['Comment']
+ frag_meta['Meta']['StructuralSystems'][st][
'Comment'
]
+ "\n"
+ frag_meta['Meta']['DesignLevels'][
convert_design_level[dl]
@@ -1648,12 +1677,14 @@
}

# store the Limit State parameters
ds_meta = frag_meta['Meta']['StructuralSystems'][st]['DamageStates']
ds_meta = frag_meta['Meta']['StructuralSystems'][st][
'DamageStates'
]
for LS_i in range(1, 5):
df_db.loc[counter, f'LS{LS_i}-Family'] = 'lognormal'
df_db.loc[counter, f'LS{LS_i}-Theta_0'] = LF_data['EDP_limits'][
dl
][bt][LS_i - 1]
df_db.loc[counter, f'LS{LS_i}-Theta_0'] = LF_data[
'EDP_limits'
][dl][bt][LS_i - 1]
df_db.loc[counter, f'LS{LS_i}-Theta_1'] = LF_data[
'Fragility_beta'
][dl]
@@ -1836,7 +1867,9 @@ def create_Hazus_EQ_repair_db(
# create the MultiIndex
cmp_types = ['STR', 'NSD', 'NSA', 'LF']
comps = [
f'{cmp_type}.{occ_type}' for cmp_type in cmp_types for occ_type in occupancies
f'{cmp_type}.{occ_type}'
for cmp_type in cmp_types
for occ_type in occupancies
]
DVs = ['Cost', 'Time']
df_MI = pd.MultiIndex.from_product([comps, DVs], names=['ID', 'DV'])
@@ -2271,9 +2304,15 @@ def create_Hazus_HU_fragility_db(
'Masonry, Engineered Residential Building, High-Rise (6+ Stories).'
),
# ------------------------
'M.ECB.L': ('Masonry, Engineered Commercial Building, Low-Rise (1-2 Stories).'),
'M.ECB.M': ('Masonry, Engineered Commercial Building, Mid-Rise (3-5 Stories).'),
'M.ECB.H': ('Masonry, Engineered Commercial Building, High-Rise (6+ Stories).'),
'M.ECB.L': (
'Masonry, Engineered Commercial Building, Low-Rise (1-2 Stories).'
),
'M.ECB.M': (
'Masonry, Engineered Commercial Building, Mid-Rise (3-5 Stories).'
),
'M.ECB.H': (
'Masonry, Engineered Commercial Building, High-Rise (6+ Stories).'
),
# ------------------------
'C.ERB.L': (
'Concrete, Engineered Residential Building, Low-Rise (1-2 Stories).'
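Most of the db.py churn above is Black rewrapping assignments into a parameter table indexed by (ID, DV) pairs and filled cell by cell with `.loc`. The sketch below illustrates that table-building pattern only; the component types, occupancies, column names, and values are made up for the example and are not the full pelicun schema.

```python
# Sketch of the (ID, DV)-indexed parameter table that the db.py functions
# fill via .loc assignments; IDs, columns, and values are illustrative only.
import pandas as pd

cmp_types = ['STR', 'NSD']
occupancies = ['RES1', 'COM1']
comps = [
    f'{cmp_type}.{occ_type}'
    for cmp_type in cmp_types
    for occ_type in occupancies
]
DVs = ['Cost', 'Time']
df_MI = pd.MultiIndex.from_product([comps, DVs], names=['ID', 'DV'])
df_db = pd.DataFrame(index=df_MI, columns=['DS1-Family', 'DS1-Theta_0'])

# Single-cell assignments mirror the wrapped .loc calls in the diff.
df_db.loc[('STR.RES1', 'Cost'), 'DS1-Family'] = 'lognormal'
df_db.loc[('STR.RES1', 'Cost'), 'DS1-Theta_0'] = f"{1.25:g}"
print(df_db)
```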
12 changes: 9 additions & 3 deletions pelicun/file_io.py
@@ -407,16 +407,22 @@ def load_data(

conversion_factors = units.map(
lambda unit: (
1.00 if pd.isna(unit) else unit_conversion_factors.get(unit, 1.00)
1.00
if pd.isna(unit)
else unit_conversion_factors.get(unit, 1.00)
)
)

if orientation == 1:
data.loc[:, numeric_elements] = data.loc[:, numeric_elements].multiply(
data.loc[:, numeric_elements] = data.loc[
:, numeric_elements
].multiply(
conversion_factors, axis=axis[orientation]
) # type: ignore
else:
data.loc[numeric_elements, :] = data.loc[numeric_elements, :].multiply(
data.loc[numeric_elements, :] = data.loc[
numeric_elements, :
].multiply(
conversion_factors, axis=axis[orientation]
) # type: ignore

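The load_data hunk above only rewraps the unit-conversion logic. Here is a minimal sketch of what that logic does for column-oriented data: map each unit label to a factor, defaulting to 1.00, then scale the columns with DataFrame.multiply. The unit table, column labels, and the omission of the orientation branch are assumptions for the example.

```python
# Sketch of load_data's unit conversion; the unit table and column labels
# are illustrative, not the pelicun defaults.
import pandas as pd

unit_conversion_factors = {'inchps2': 0.0254, 'g': 9.80665}

data = pd.DataFrame({'PFA-1-1': [1.0, 2.0], 'PID-1-1': [0.01, 0.02]})
units = pd.Series({'PFA-1-1': 'g', 'PID-1-1': None})

conversion_factors = units.map(
    lambda unit: (
        1.00 if pd.isna(unit) else unit_conversion_factors.get(unit, 1.00)
    )
)

# axis=1 aligns the factor Series with the column labels.
data = data.multiply(conversion_factors, axis=1)
print(data)
```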
15 changes: 11 additions & 4 deletions pelicun/model/asset_model.py
@@ -359,7 +359,9 @@ def get_attribute(attribute_str, dtype=float, default=np.nan):
cmp_marginal_param_series = []
for col, cmp_marginal_param in cmp_marginal_param_dct.items():
cmp_marginal_param_series.append(
pd.Series(cmp_marginal_param, dtype=dtypes[col], name=col, index=index)
pd.Series(
cmp_marginal_param, dtype=dtypes[col], name=col, index=index
)
)

cmp_marginal_params = pd.concat(cmp_marginal_param_series, axis=1)
@@ -390,7 +392,9 @@ def get_attribute(attribute_str, dtype=float, default=np.nan):

self.cmp_marginal_params = cmp_marginal_params.drop('Units', axis=1)

self.log.msg("Model parameters successfully loaded.", prepend_timestamp=False)
self.log.msg(
"Model parameters successfully loaded.", prepend_timestamp=False
)

self.log.msg(
"\nComponent model marginal distributions:\n" + str(cmp_marginal_params),
@@ -399,7 +403,9 @@ def get_attribute(attribute_str, dtype=float, default=np.nan):

# the empirical data and correlation files can be added later, if needed

def list_unique_component_ids(self, as_set: bool = False) -> list[str] | set[str]:
def list_unique_component_ids(
self, as_set: bool = False
) -> list[str] | set[str]:
"""
Returns unique component IDs.
@@ -496,7 +502,8 @@ def _create_cmp_RVs(self) -> None:
uq.rv_class_map(family)(
name=f'CMP-{cmp[0]}-{cmp[1]}-{cmp[2]}-{cmp[3]}',
theta=[
getattr(rv_params, f"Theta_{t_i}", np.nan) for t_i in range(3)
getattr(rv_params, f"Theta_{t_i}", np.nan)
for t_i in range(3)
],
truncation_limits=[
getattr(rv_params, f"Truncate{side}", np.nan)
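The asset model hunks likewise only rewrap existing code, most notably the assembly of the component marginal-parameter table from per-column Series with explicit dtypes. Below is a minimal sketch of that pattern under assumed column names, dtypes, and component IDs; none of these specifics come from the commit itself.

```python
# Sketch of assembling cmp_marginal_params from per-column Series with
# explicit dtypes, as in the wrapped pd.Series(...) call above; the index,
# column names, dtypes, and values are illustrative.
import pandas as pd

index = pd.Index(['B.10.31.001', 'D.20.22.013'], name='cmp')
dtypes = {'Theta_0': float, 'Blocks': int, 'Units': object}
cmp_marginal_param_dct = {
    'Theta_0': [2.0, 1.0],
    'Blocks': [1, 4],
    'Units': ['ea', 'ft'],
}

cmp_marginal_param_series = []
for col, cmp_marginal_param in cmp_marginal_param_dct.items():
    cmp_marginal_param_series.append(
        pd.Series(
            cmp_marginal_param, dtype=dtypes[col], name=col, index=index
        )
    )

cmp_marginal_params = pd.concat(cmp_marginal_param_series, axis=1)
print(cmp_marginal_params.dtypes)
```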