Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/master'
Browse files Browse the repository at this point in the history
  • Loading branch information
zsarnoczay committed Nov 27, 2024
2 parents 2093e3b + a0506c1 commit 99eacdd
Show file tree
Hide file tree
Showing 9 changed files with 429 additions and 68 deletions.
2 changes: 1 addition & 1 deletion pelicun/file_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -307,7 +307,7 @@ def substitute_default_path(

def load_data( # noqa: C901
data_source: str | pd.DataFrame,
unit_conversion_factors: dict | None,
unit_conversion_factors: dict | None = None,
orientation: int = 0,
*,
reindex: bool = True,
Expand Down
208 changes: 157 additions & 51 deletions pelicun/model/damage_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@

from __future__ import annotations

from collections import defaultdict
from functools import partial
from pathlib import Path
from typing import TYPE_CHECKING
Expand Down Expand Up @@ -906,6 +907,41 @@ def _handle_operation( # noqa: PLR6301
msg = f'Invalid operation: `{operation}`'
raise ValueError(msg)

def _handle_operation_list(
self, initial_value: float, operations: list[tuple[str, float]]
) -> np.ndarray:
"""
Apply one or more operations to an initial value and return the results.
Parameters.
----------
initial_value : float
The initial value to which the operations will be applied.
operations : list of tuple
A list of operations where each operation is represented as a tuple.
The first element of the tuple is a string representing the operation
type, and the second element is a float representing the value to be
used in the operation.
Returns
-------
np.ndarray
An array of results after applying each operation to the initial value.
"""
if len(operations) == 1:
return np.array(
[
self._handle_operation(
initial_value, operations[0][0], operations[0][1]
)
]
)
new_values = [
self._handle_operation(initial_value, operation[0], operation[1])
for operation in operations
]
return np.array(new_values)

def _generate_dmg_sample(
self,
sample_size: int,
Expand Down Expand Up @@ -1160,13 +1196,18 @@ def _create_dmg_RVs( # noqa: N802, C901
A DataFrame that groups performance groups into batches
for efficient damage assessment.
scaling_specification: dict, optional
A dictionary defining the shift in median.
Example: {'CMP-1-1': '*1.2', 'CMP-1-2': '/1.4'}
The keys are individual components that should be present
in the `capacity_sample`. The values should be strings
containing an operation followed by the value formatted as
a float. The operation can be '+' for addition, '-' for
subtraction, '*' for multiplication, and '/' for division.
A dictionary defining the shift in median.
Example: {'CMP-1-1': {'LS1':['*1.2', '*0.8'], 'LS2':'*1.2'},
'CMP-1-2': {'ALL':'/1.4'}} The first level keys are individual
components that should be present in the `capacity_sample`. The
second level key is the limit state to apply the scaling to. The
values should be strings or list of strings. The strings should
contain an operation followed by the value formatted as a float.
The operation can be '+' for addition, '-' for subtraction, '*'
for multiplication, and '/' for division. If different operations
are required for different realizations, a list of strings can
be provided. When 'ALL' is used as the key, the operation will
be applied to all limit states.
Returns
-------
Expand All @@ -1175,15 +1216,6 @@ def _create_dmg_RVs( # noqa: N802, C901
one for the capacity random variables and one for the LSDS
assignments.
Raises
------
ValueError
Raises an error if the scaling specification is invalid or
if the input DataFrame does not meet the expected format.
TypeError
If there are any issues with the types of the data in the
input DataFrame.
"""

def assign_lsds(
Expand Down Expand Up @@ -1296,6 +1328,90 @@ def map_ds(values: np.ndarray, offset: int) -> np.ndarray:

return ds_id

def parse_scaling_specification(scaling_specification: dict) -> dict:  # noqa: C901
    """
    Parse and validate the scaling specification, used in the '_create_dmg_RVs' method.

    Parameters
    ----------
    scaling_specification: dict, optional
        A dictionary defining the shift in median.
        Example: {'CMP-1-1': {'LS1': ['*1.2', '*0.8'], 'LS2': '*1.2'},
        'CMP-1-2': {'ALL': '/1.4'}} The first level keys are individual
        components that should be present in the `capacity_sample`. The
        second level key is the limit state to apply the scaling to.
        The values should be strings or lists of strings. The strings
        should contain an operation followed by the value formatted as
        a float. The operation can be '+' for addition, '-' for
        subtraction, '*' for multiplication, and '/' for division. If
        different operations are required for different realizations, a
        list of strings can be provided. When 'ALL' is used as the key,
        the operation will be applied to all limit states.

    Returns
    -------
    dict
        The parsed and validated scaling specification: maps each
        component to a dict of limit state -> list of
        (operation, float) tuples.

    Raises
    ------
    ValueError
        If the scaling specification is invalid.
    TypeError
        If the type of an entry is invalid.

    """
    # If there are contents, ensure they are valid.
    # See docstring for an example of what is expected.
    parsed_scaling_specification: dict = defaultdict(dict)
    css = 'capacity adjustment specification'
    # validate contents
    for key, value in scaling_specification.items():
        # 'ALL' already covers every limit state, so it must be the
        # only entry for a component.
        if 'ALL' in value and len(value) > 1:
            msg = (
                f'Invalid entry in scaling_specification: '
                f"{value}. No other entries are allowed for a component when 'ALL' is used."
            )
            raise ValueError(msg)
        # loop through limit states
        for limit_state_id, specifics in value.items():
            if not (limit_state_id.startswith('LS') or limit_state_id == 'ALL'):
                msg = (
                    f'Invalid entry in scaling_specification: {limit_state_id}. '
                    f"It has to start with 'LS' or be 'ALL'. "
                    f'See docstring of DamageModel._create_dmg_RVs.'
                )
                raise ValueError(msg)
            # A bare string is shorthand for a one-element list.
            specifics_list = specifics if isinstance(specifics, list) else [specifics]
            for spec in specifics_list:
                if not isinstance(spec, str):
                    # fixed typo ('Invalud') and missing spaces between
                    # the concatenated f-string fragments
                    msg = (
                        f'Invalid entry in {css}: {spec}. '
                        f'The specified scaling operation has to be a string. '
                        f'See docstring of DamageModel._create_dmg_RVs.'
                    )
                    raise TypeError(msg)
                # First character is the operation, the rest is the operand.
                capacity_adjustment_operation = spec[0]
                number = spec[1:]
                if capacity_adjustment_operation not in {'+', '-', '*', '/'}:
                    # single well-formed message instead of a
                    # two-argument ValueError
                    msg = (
                        f'Invalid operation in {css}: '
                        f'{capacity_adjustment_operation}'
                    )
                    raise ValueError(msg)
                fnumber = base.float_or_None(number)
                if fnumber is None:
                    msg = f'Invalid number in {css}: {number}'
                    raise ValueError(msg)
                if limit_state_id not in parsed_scaling_specification[key]:
                    parsed_scaling_specification[key][limit_state_id] = []
                parsed_scaling_specification[key][limit_state_id].append(
                    (capacity_adjustment_operation, fnumber)
                )
    return parsed_scaling_specification

if self._asmnt.log.verbose:
self.log.msg('Generating capacity variables ...', prepend_timestamp=True)

Expand All @@ -1308,38 +1424,12 @@ def map_ds(values: np.ndarray, offset: int) -> np.ndarray:
if not scaling_specification:
scaling_specification = {}
else:
# if there are contents, ensure they are valid.
# See docstring for an example of what is expected.
parsed_scaling_specification = {}
# validate contents
for key, value in scaling_specification.items():
css = 'capacity adjustment specification'
if not isinstance(value, str):
msg = (
f'Invalid entry in {css}: {value}. It has to be a string. '
f'See docstring of DamageModel._create_dmg_RVs.'
)
raise TypeError(msg)
capacity_adjustment_operation = value[0]
number = value[1::]
if capacity_adjustment_operation not in {'+', '-', '*', '/'}:
msg = (
f'Invalid operation in {css}: '
f'{capacity_adjustment_operation}'
)
raise ValueError(msg)
fnumber = base.float_or_None(number)
if fnumber is None:
msg = f'Invalid number in {css}: {number}'
raise ValueError(msg)
parsed_scaling_specification[key] = (
capacity_adjustment_operation,
fnumber,
)
scaling_specification = parsed_scaling_specification
scaling_specification = parse_scaling_specification(
scaling_specification
)

# get the component sample and blocks from the asset model
for pg in pgb.index:
for pg in pgb.index: # noqa: PLR1702
# determine demand capacity adjustment operation, if required
cmp_loc_dir = '-'.join(pg[0:3])
capacity_adjustment_operation = scaling_specification.get( # type: ignore
Expand Down Expand Up @@ -1396,11 +1486,27 @@ def map_ds(values: np.ndarray, offset: int) -> np.ndarray:

if capacity_adjustment_operation:
if family in {'normal', 'lognormal', 'deterministic'}:
theta[0] = self._handle_operation(
theta[0],
capacity_adjustment_operation[0],
float(capacity_adjustment_operation[1]),
)
# Only scale the median value if ls_id is defined in capacity_adjustment_operation
# Otherwise, use the original value
new_theta_0 = None
if 'ALL' in capacity_adjustment_operation:
new_theta_0 = self._handle_operation_list(
theta[0],
capacity_adjustment_operation['ALL'],
)
elif f'LS{ls_id}' in capacity_adjustment_operation:
new_theta_0 = self._handle_operation_list(
theta[0],
capacity_adjustment_operation[f'LS{ls_id}'],
)
if new_theta_0 is not None:
if new_theta_0.size == 1:
theta[0] = new_theta_0[0]
else:
# Repeat the theta values new_theta_0.size times along axis 0
# and 1 time along axis 1
theta = np.tile(theta, (new_theta_0.size, 1))
theta[:, 0] = new_theta_0
else:
self.log.warning(
f'Capacity adjustment is only supported '
Expand Down
4 changes: 2 additions & 2 deletions pelicun/resources/SimCenterDBDL/damage_DB_Hazus_EQ_trnsp.csv
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ HWB.GS.25,0,Spectral Acceleration|1.0,g,0,0,lognormal,0.3,0.6,,lognormal,0.5,0.6
HWB.GS.26,0,Spectral Acceleration|1.0,g,0,0,lognormal,0.75,0.6,,lognormal,0.75,0.6,,lognormal,0.75,0.6,,lognormal,1.1,0.6,
HWB.GS.27,0,Spectral Acceleration|1.0,g,0,0,lognormal,0.75,0.6,,lognormal,0.75,0.6,,lognormal,0.75,0.6,,lognormal,1.1,0.6,
HWB.GS.28,0,Spectral Acceleration|1.0,g,0,0,lognormal,0.8,0.6,,lognormal,1,0.6,,lognormal,1.2,0.6,,lognormal,1.7,0.6,
HWB.GF,0,Permanent Ground Deformation,inch,0,0,lognormal,3.9,0.2,,lognormal,13.8,0.2,,,,,,,,,
HWB.GF,0,Permanent Ground Deformation,inch,0,0,lognormal,3.9,0.2,,lognormal,3.9,0.2,,lognormal,3.9,0.2,,lognormal,13.8,0.2,
HTU.GS.1,0,Peak Ground Acceleration,g,0,0,lognormal,0.6,0.6,,lognormal,0.8,0.6,,,,,,,,,
HTU.GS.2,0,Peak Ground Acceleration,g,0,0,lognormal,0.5,0.6,,lognormal,0.7,0.6,,,,,,,,,
HTU.GF,0,Permanent Ground Deformation,inch,0,0,lognormal,6,0.7,,lognormal,12,0.5,,lognormal,60,0.5,,,,,
HTU.GF,0,Permanent Ground Deformation,inch,0,0,lognormal,6,0.7,,lognormal,12,0.5,,lognormal,60,0.5,,,,,
1 change: 1 addition & 0 deletions pelicun/resources/auto/Hazus_Earthquake_CSM.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ def convert_story_rise(structureType, stories):
rise = None

else:
rise = None
# First, check if we have valid story information
try:
stories = int(stories)
Expand Down
Loading

0 comments on commit 99eacdd

Please sign in to comment.