Skip to content

Commit

Permalink
Merge remote-tracking branch 'simcenter/master'
Browse files Browse the repository at this point in the history
  • Loading branch information
ioannis-vm committed Aug 28, 2024
2 parents 85ed952 + 869c699 commit 69c2a45
Show file tree
Hide file tree
Showing 94 changed files with 1,183 additions and 409 deletions.
3 changes: 2 additions & 1 deletion modules/Workflow/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ simcenter_add_python_script(SCRIPT changeJSON.py)
simcenter_add_python_script(SCRIPT "sWHALE.py")
simcenter_add_python_script(SCRIPT "qWHALE.py")
simcenter_add_python_script(SCRIPT "rWHALE.py")
simcenter_add_python_script(SCRIPT "createResponseCSV.py")
simcenter_add_python_script(SCRIPT "siteResponseWHALE.py")
simcenter_add_python_script(SCRIPT "createGM4BIM.py")
simcenter_add_python_script(SCRIPT "computeResponseSpectrum.py")
Expand All @@ -19,4 +20,4 @@ simcenter_add_python_script(SCRIPT R2DTool_workflow.py)
simcenter_add_python_script(SCRIPT CreateWorkflowJobs.py)

simcenter_add_executable(NAME runMultiModelDriver
FILES runMultiModelDriver.cpp)
FILES runMultiModelDriver.cpp)
8 changes: 4 additions & 4 deletions modules/Workflow/computeResponseSpectrum.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ def convert_accel_units(acceleration, from_, to_='cm/s/s'): # noqa: C901
acceleration = np.asarray(acceleration)
if from_ == 'g':
if to_ == 'g':
return acceleration
return acceleration # noqa: DOC201, RUF100
if to_ in m_sec_square:
return acceleration * g
if to_ in cm_sec_square:
Expand Down Expand Up @@ -70,7 +70,7 @@ def get_velocity_displacement(
velocity = time_step * cumtrapz(acceleration, initial=0.0)
if displacement is None:
displacement = time_step * cumtrapz(velocity, initial=0.0)
return velocity, displacement
return velocity, displacement # noqa: DOC201, RUF100


class NewmarkBeta:
Expand Down Expand Up @@ -160,7 +160,7 @@ def run(self):
'PGV': np.max(np.fabs(self.velocity)),
'PGD': np.max(np.fabs(self.displacement)),
}
return self.response_spectrum, time_series, accel, vel, disp
return self.response_spectrum, time_series, accel, vel, disp # noqa: DOC201, RUF100

def _newmark_beta(self, omega, cval, kval): # noqa: ARG002
"""Newmark-beta integral
Expand Down Expand Up @@ -216,4 +216,4 @@ def _newmark_beta(self, omega, cval, kval): # noqa: ARG002
disp[j, :] = delta_u + disp[j - 1, :]
a_t[j, :] = ground_acc[j] + accel[j, :]

return accel, vel, disp, a_t
return accel, vel, disp, a_t # noqa: DOC201, RUF100
36 changes: 18 additions & 18 deletions modules/Workflow/createGM4BIM.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ def get_scale_factors(input_units, output_units): # noqa: C901

scale_factors.update({input_name: f_scale})

return scale_factors
return scale_factors # noqa: DOC201, RUF100


def createFilesForEventGrid(inputDir, outputDir, removeInputDir): # noqa: C901, N802, N803, D103, PLR0915
Expand Down Expand Up @@ -410,28 +410,28 @@ def createFilesForEventGrid(inputDir, outputDir, removeInputDir): # noqa: C901,
m_pgd_y = 0.0
s_pgd_y = 0.0
# add to dictionary
dict_im[('type', 'loc', 'dir', 'stat')].append(int(siteID))
dict_im[('type', 'loc', 'dir', 'stat')].append(int(siteID)) # noqa: RUF031, RUF100
# pga
dict_im[('PGA', 0, 1, 'median')].append(m_pga_x)
dict_im[('PGA', 0, 1, 'beta')].append(s_pga_x)
dict_im[('PGA', 0, 2, 'median')].append(m_pga_y)
dict_im[('PGA', 0, 2, 'beta')].append(s_pga_y)
dict_im[('PGA', 0, 1, 'median')].append(m_pga_x) # noqa: RUF031, RUF100
dict_im[('PGA', 0, 1, 'beta')].append(s_pga_x) # noqa: RUF031, RUF100
dict_im[('PGA', 0, 2, 'median')].append(m_pga_y) # noqa: RUF031, RUF100
dict_im[('PGA', 0, 2, 'beta')].append(s_pga_y) # noqa: RUF031, RUF100
# pgv
dict_im[('PGV', 0, 1, 'median')].append(m_pgv_x)
dict_im[('PGV', 0, 1, 'beta')].append(s_pgv_x)
dict_im[('PGV', 0, 2, 'median')].append(m_pgv_y)
dict_im[('PGV', 0, 2, 'beta')].append(s_pgv_y)
dict_im[('PGV', 0, 1, 'median')].append(m_pgv_x) # noqa: RUF031, RUF100
dict_im[('PGV', 0, 1, 'beta')].append(s_pgv_x) # noqa: RUF031, RUF100
dict_im[('PGV', 0, 2, 'median')].append(m_pgv_y) # noqa: RUF031, RUF100
dict_im[('PGV', 0, 2, 'beta')].append(s_pgv_y) # noqa: RUF031, RUF100
# pgd
dict_im[('PGD', 0, 1, 'median')].append(m_pgd_x)
dict_im[('PGD', 0, 1, 'beta')].append(s_pgd_x)
dict_im[('PGD', 0, 2, 'median')].append(m_pgd_y)
dict_im[('PGD', 0, 2, 'beta')].append(s_pgd_y)
dict_im[('PGD', 0, 1, 'median')].append(m_pgd_x) # noqa: RUF031, RUF100
dict_im[('PGD', 0, 1, 'beta')].append(s_pgd_x) # noqa: RUF031, RUF100
dict_im[('PGD', 0, 2, 'median')].append(m_pgd_y) # noqa: RUF031, RUF100
dict_im[('PGD', 0, 2, 'beta')].append(s_pgd_y) # noqa: RUF031, RUF100
for jj, Ti in enumerate(periods): # noqa: N806
cur_sa = f'SA({Ti}s)'
dict_im[(cur_sa, 0, 1, 'median')].append(m_psa_x[jj])
dict_im[(cur_sa, 0, 1, 'beta')].append(s_psa_x[jj])
dict_im[(cur_sa, 0, 2, 'median')].append(m_psa_y[jj])
dict_im[(cur_sa, 0, 2, 'beta')].append(s_psa_y[jj])
dict_im[(cur_sa, 0, 1, 'median')].append(m_psa_x[jj]) # noqa: RUF031, RUF100
dict_im[(cur_sa, 0, 1, 'beta')].append(s_psa_x[jj]) # noqa: RUF031, RUF100
dict_im[(cur_sa, 0, 2, 'median')].append(m_psa_y[jj]) # noqa: RUF031, RUF100
dict_im[(cur_sa, 0, 2, 'beta')].append(s_psa_y[jj]) # noqa: RUF031, RUF100

# aggregate
for cur_key, cur_value in dict_im.items():
Expand Down
89 changes: 89 additions & 0 deletions modules/Workflow/createResponseCSV.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
# noqa: D100

#
# Code to write response.csv file given input and dakotaTab.out files
#

# Written fmk, important code copied from whale/main.py
# date: 08/24

import argparse
import json
import os

import numpy as np
import pandas as pd

def main(input_file, dakota_tab_file): # noqa: D103

directory_inputs = os.path.dirname(input_file) # noqa: PTH120
os.chdir(directory_inputs)

try:
# Attempt to open the file
with open(input_file) as file: # noqa: PTH123
data = json.load(file)

except FileNotFoundError:
# Handle the error if the file is not found
print(f"Error createResponseCSV.py: The file '{input_file}' was not found.") # noqa: T201
return
except OSError:
# Handle other I/O errors
print(f"Error createResponseCSV.py: Error reading the file '{input_file}'.") # noqa: T201
return

app_data = data.get('Applications', None)
if app_data is not None:
dl_data = app_data.get('DL', None)

if dl_data is not None:
dl_app_data = dl_data.get('ApplicationData', None)

if dl_app_data is not None:
is_coupled = dl_app_data.get('coupled_EDP', None)

try:
# sy, abs - added try-statement because dakota-reliability does not write DakotaTab.out
dakota_out = pd.read_csv(dakota_tab_file, sep=r'\s+', header=0, index_col=0)

if is_coupled:
if 'eventID' in dakota_out.columns:
events = dakota_out['eventID'].values # noqa: PD011
events = [int(e.split('x')[-1]) for e in events]
sorter = np.argsort(events)
dakota_out = dakota_out.iloc[sorter, :]
dakota_out.index = np.arange(dakota_out.shape[0])

dakota_out.to_csv('response.csv')

except FileNotFoundError:
# Handle the error if the file is not found
print(f"Error createResponseCSV.py: The file '{dakota_tab_file}' not found.") # noqa: T201
return

except OSError:
# Handle other I/O errors
print(f"Error createResponseCSV.py: Error reading '{dakota_tab_file}'.") # noqa: T201
return


if __name__ == '__main__':
    # Command-line entry point: parse the two file paths and build response.csv.
    parser = argparse.ArgumentParser()

    # Add arguments with default values
    parser.add_argument(
        '--inputFile',
        type=str,
        default='AIM.json',
        help='Path to the input file',  # fixed: help text had a stray ')'
    )
    parser.add_argument(
        '--dakotaTab',
        type=str,
        default='dakotaTab.out',
        help='Path to the dakotaTab file',  # fixed: help text had a stray ')'
    )

    # Parse the arguments
    args = parser.parse_args()

    # Use the arguments
    main(args.inputFile, args.dakotaTab)
16 changes: 8 additions & 8 deletions modules/Workflow/whale/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -310,7 +310,7 @@ def create_command(command_list, enforced_python=None):
for command_arg in command_list[1:]:
command += f'"{command_arg}" '

return command
return command # noqa: DOC201, RUF100


def run_command(command):
Expand Down Expand Up @@ -357,7 +357,7 @@ def run_command(command):

py_script.main(arg_list)

return '', ''
return '', '' # noqa: DOC201, RUF100

else: # noqa: RET505
# fmk with Shell=True not working on older windows machines, new approach needed for quoted command .. turn into a list
Expand Down Expand Up @@ -668,7 +668,7 @@ def get_command_list(self, app_path, force_posix=False): # noqa: FBT002, C901

# pp.pprint(arg_list)

return arg_list
return arg_list # noqa: DOC201, RUF100


class Workflow:
Expand Down Expand Up @@ -1316,7 +1316,7 @@ def create_asset_files(self):

log_div()

return assetFilesList
return assetFilesList # noqa: DOC201, RUF100

def augment_asset_files(self): # noqa: C901
"""Short description
Expand Down Expand Up @@ -1504,7 +1504,7 @@ def augment_asset_files(self): # noqa: C901
)
log_div()

return assetFilesList
return assetFilesList # noqa: DOC201, RUF100

def perform_system_performance_assessment(self, asset_type):
"""For an asset type run the system level performance assessment application
Expand All @@ -1525,7 +1525,7 @@ def perform_system_performance_assessment(self, asset_type):
prepend_timestamp=False,
)
log_div()
return False
return False # noqa: DOC201, RUF100

if performance_app.rel_path == None: # noqa: E711
log_msg(
Expand Down Expand Up @@ -1909,7 +1909,7 @@ def init_simdir(self, asst_id=None, AIM_file_path='AIM.json'): # noqa: C901, N8
prepend_timestamp=False,
)
log_div()
return dst
return dst # noqa: DOC201, RUF100

def cleanup_simdir(self, asst_id):
"""Short description
Expand Down Expand Up @@ -2734,7 +2734,7 @@ def estimate_losses( # noqa: C901
],
)
if ('PID', '0') in df_res.columns:
del df_res[('PID', '0')]
del df_res[('PID', '0')] # noqa: RUF031, RUF100

# store the EDP statistics in the output DF
for col in np.transpose(col_info):
Expand Down
4 changes: 2 additions & 2 deletions modules/common/simcenter_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -283,7 +283,7 @@ def get_scale_factors(input_units, output_units): # noqa: C901

scale_factors.update({input_name: f_scale})

return scale_factors
return scale_factors # noqa: DOC201, RUF100


def get_unit_bases(input_units):
Expand All @@ -306,4 +306,4 @@ def get_unit_bases(input_units):
input_unit_bases = cur_unit_bases
break

return input_unit_bases
return input_unit_bases # noqa: DOC201, RUF100
2 changes: 1 addition & 1 deletion modules/createEVENT/CFDEvent/CFDEvent.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ def directionToDof(direction): # noqa: N802
"""Converts direction to degree of freedom""" # noqa: D400, D401
directioMap = {'X': 1, 'Y': 2, 'Z': 3} # noqa: N806

return directioMap[direction]
return directioMap[direction] # noqa: DOC201, RUF100


def addFloorForceToEvent(patternsArray, force, direction, floor): # noqa: ARG001, N802, N803
Expand Down
2 changes: 1 addition & 1 deletion modules/createEVENT/EmptyDomainCFD/EmptyDomainCFD.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ def directionToDof(direction): # noqa: N802
"""Converts direction to degree of freedom""" # noqa: D400, D401
directioMap = {'X': 1, 'Y': 2, 'Z': 3} # noqa: N806

return directioMap[direction]
return directioMap[direction] # noqa: DOC201, RUF100


def addFloorForceToEvent(patternsArray, force, direction, floor): # noqa: ARG001, N802, N803
Expand Down
14 changes: 7 additions & 7 deletions modules/createEVENT/EmptyDomainCFD/post_process_output.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ def readPressureProbes(fileName): # noqa: N802, N803
time = np.asarray(time, dtype=np.float32)
p = np.asarray(p, dtype=np.float32)

return probes, time, p
return probes, time, p # noqa: DOC201, RUF100


def read_pressure_data(file_names):
Expand Down Expand Up @@ -291,7 +291,7 @@ def read_openFoam_scalar_field(file_name): # noqa: N802

sField = np.asarray(sField, dtype=np.float32) # noqa: N806

return sField # noqa: RET504
return sField # noqa: DOC201, RET504, RUF100


def read_openFoam_vector_field(file_name): # noqa: N802
Expand All @@ -312,7 +312,7 @@ def read_openFoam_vector_field(file_name): # noqa: N802

vField = np.asarray(vField, dtype=np.float32) # noqa: N806

return vField # noqa: RET504
return vField # noqa: DOC201, RET504, RUF100


def read_openFoam_tensor_field(file_name): # noqa: N802
Expand Down Expand Up @@ -340,7 +340,7 @@ def read_openFoam_tensor_field(file_name): # noqa: N802

vField = np.asarray(vField, dtype=np.float32) # noqa: N806

return vField # noqa: RET504
return vField # noqa: DOC201, RET504, RUF100


def read_openFoam_symmetric_tensor_field(file_name): # noqa: N802
Expand All @@ -367,7 +367,7 @@ def read_openFoam_symmetric_tensor_field(file_name): # noqa: N802

vField = np.asarray(vField, dtype=np.float32) # noqa: N806

return vField # noqa: RET504
return vField # noqa: DOC201, RET504, RUF100


def read_velocity_data(path):
Expand Down Expand Up @@ -462,7 +462,7 @@ def read_velocity_probes(fileName): # noqa: N803
time = np.asarray(time, dtype=np.float32)
U = np.asarray(U, dtype=np.float32) # noqa: N806

return probes, time, U
return probes, time, U # noqa: DOC201, RUF100


def calculate_length_scale(u, uav, dt, min_corr=0.0):
Expand All @@ -481,7 +481,7 @@ def calculate_length_scale(u, uav, dt, min_corr=0.0):

L = uav * np.trapz(corr, dx=dt) # noqa: NPY201, N806

return L # noqa: RET504
return L # noqa: DOC201, RET504, RUF100


def psd(x, dt, nseg): # noqa: F811
Expand Down
4 changes: 2 additions & 2 deletions modules/createEVENT/GeoClawOpenFOAM/AddBuildingForces.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ def validateCaseDirectoryStructure(caseDir): # noqa: N802, N803
It also checks that system directory contains the controlDict
""" # noqa: D205, D400, D401, D404
if not os.path.isdir(caseDir): # noqa: PTH112
return False
return False # noqa: DOC201, RUF100

caseDirList = os.listdir(caseDir) # noqa: N806
necessaryDirs = ['0', 'constant', 'system'] # noqa: N806
Expand All @@ -27,7 +27,7 @@ def findFunctionsDictionary(controlDictLines): # noqa: N802, N803
"""This method will find functions dictionary in the controlDict""" # noqa: D400, D401, D404
for line in controlDictLines:
if line.startswith('functions'):
return (True, controlDictLines.index(line) + 2)
return (True, controlDictLines.index(line) + 2) # noqa: DOC201, RUF100

return [False, len(controlDictLines)]

Expand Down
2 changes: 1 addition & 1 deletion modules/createEVENT/GeoClawOpenFOAM/GeoClaw.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,4 +80,4 @@ def creategeom(self, data, path):
# Points of interest
bottompts = self.getbathy(maxvalues, minvalues, data) # noqa: F841

return 0
return 0 # noqa: DOC201, RUF100
2 changes: 1 addition & 1 deletion modules/createEVENT/GeoClawOpenFOAM/GeoClawBathy.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,4 +61,4 @@ def creategeom(self, data, path): # noqa: ARG002
# Create a utilities object
hydroutil = hydroUtils() # noqa: F841

return 0
return 0 # noqa: DOC201, RUF100
Loading

0 comments on commit 69c2a45

Please sign in to comment.