
Commit

Merge remote-tracking branch 'upstream/master'
bsaakash committed Sep 3, 2024
2 parents 677cbbc + be81ccc commit a70db4a
Showing 99 changed files with 1,272 additions and 517 deletions.
28 changes: 7 additions & 21 deletions .github/workflows/format_check.yml
```diff
@@ -1,25 +1,11 @@
-name: Format Check
-
+name: Ruff format
 on: [push, pull_request]
 
 jobs:
-  format-check:
+  ruff:
     runs-on: ubuntu-latest
-
     steps:
-      - name: Checkout code
-        uses: actions/checkout@v2
-
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: '3.10'
-
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install ruff
-      - name: Check code formatting
-        run: |
-          ruff check . --diff
+      - uses: actions/checkout@v4
+      - uses: chartboost/ruff-action@v1
+        with:
+          args: 'format --check'
+          version: 0.6.1
```
27 changes: 6 additions & 21 deletions .github/workflows/lint.yml
```diff
@@ -1,25 +1,10 @@
-name: Lint
-
+name: Ruff check
 on: [push, pull_request]
 
 jobs:
-  lint:
+  ruff:
     runs-on: ubuntu-latest
-
     steps:
-      - name: Checkout code
-        uses: actions/checkout@v2
-
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: '3.10'
-
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install ruff
-      - name: Run linter
-        run: |
-          ruff check .
+      - uses: actions/checkout@v4
+      - uses: chartboost/ruff-action@v1
+        with:
+          version: 0.6.1
```
14 changes: 2 additions & 12 deletions .github/workflows/spell_check.yml
```diff
@@ -10,16 +10,6 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v2
 
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: '3.10'
-
-      - name: Install codespell
-        run: |
-          python -m pip install --upgrade pip
-          pip install tomli codespell
       - name: Run codespell
-        run: |
-          codespell .
+        uses: codespell-project/actions-codespell@v2
```
3 changes: 2 additions & 1 deletion modules/Workflow/CMakeLists.txt
```diff
@@ -4,6 +4,7 @@ simcenter_add_python_script(SCRIPT changeJSON.py)
 simcenter_add_python_script(SCRIPT "sWHALE.py")
 simcenter_add_python_script(SCRIPT "qWHALE.py")
 simcenter_add_python_script(SCRIPT "rWHALE.py")
+simcenter_add_python_script(SCRIPT "createResponseCSV.py")
 simcenter_add_python_script(SCRIPT "siteResponseWHALE.py")
 simcenter_add_python_script(SCRIPT "createGM4BIM.py")
 simcenter_add_python_script(SCRIPT "computeResponseSpectrum.py")
@@ -19,4 +20,4 @@ simcenter_add_python_script(SCRIPT R2DTool_workflow.py)
 simcenter_add_python_script(SCRIPT CreateWorkflowJobs.py)
 
 simcenter_add_executable(NAME runMultiModelDriver
-                         FILES runMultiModelDriver.cpp)
\ No newline at end of file
+                         FILES runMultiModelDriver.cpp)
```
14 changes: 8 additions & 6 deletions modules/Workflow/MultiModelDriver.py
```diff
@@ -162,14 +162,16 @@ def main(inputFile, driverFile, appKey, registryFile, appDir, runType, osType):
                     'UTF-8',
                 )
             )
-    elif osType == 'Windows' and runType == 'runningRemote':
+    elif runType == 'runningRemote':
        with open(driverFile, 'wb') as f:  # noqa: PTH123
            f.write(
-                appDir
-                + '/applications/Workflow/'
-                + exeFileName
-                + f' {paramsFileName} {driverFile} {multiModelString}',
-                'UTF-8',
+                bytes(
+                    appDir
+                    + '/applications/Workflow/'
+                    + exeFileName
+                    + f' {paramsFileName} {driverFile} {multiModelString}',
+                    'UTF-8',
+                )
            )
    else:
        with open(driverFile, 'wb') as f:  # noqa: PTH123
```
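The fix here wraps the command string in `bytes(...)`: a file opened with mode `'wb'` accepts only bytes-like objects, and the old code passed `write()` two arguments as if it had the `bytes(str, encoding)` signature. A minimal sketch of both failure modes and the fix (the command string and file name are invented for illustration):

```python
# Sketch: writing a text command into a binary-mode driver file.
command = '/apps/Workflow/driver params.in driver "model1"'  # hypothetical

with open('driver_remote', 'wb') as f:
    # f.write(command)           # TypeError: a bytes-like object is required, not 'str'
    # f.write(command, 'UTF-8')  # TypeError: write() takes exactly one argument (2 given)
    f.write(bytes(command, 'UTF-8'))  # encode first, then write
```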
8 changes: 4 additions & 4 deletions modules/Workflow/computeResponseSpectrum.py
```diff
@@ -23,7 +23,7 @@ def convert_accel_units(acceleration, from_, to_='cm/s/s'):  # noqa: C901
     acceleration = np.asarray(acceleration)
     if from_ == 'g':
         if to_ == 'g':
-            return acceleration
+            return acceleration  # noqa: DOC201, RUF100
         if to_ in m_sec_square:
             return acceleration * g
         if to_ in cm_sec_square:
@@ -70,7 +70,7 @@ def get_velocity_displacement(
     velocity = time_step * cumtrapz(acceleration, initial=0.0)
     if displacement is None:
         displacement = time_step * cumtrapz(velocity, initial=0.0)
-    return velocity, displacement
+    return velocity, displacement  # noqa: DOC201, RUF100
@@ -160,7 +160,7 @@ def run(self):
             'PGV': np.max(np.fabs(self.velocity)),
             'PGD': np.max(np.fabs(self.displacement)),
         }
-        return self.response_spectrum, time_series, accel, vel, disp
+        return self.response_spectrum, time_series, accel, vel, disp  # noqa: DOC201, RUF100
 
     def _newmark_beta(self, omega, cval, kval):  # noqa: ARG002
         """Newmark-beta integral
@@ -216,4 +216,4 @@ def _newmark_beta(self, omega, cval, kval):  # noqa: ARG002
             disp[j, :] = delta_u + disp[j - 1, :]
             a_t[j, :] = ground_acc[j] + accel[j, :]
 
-        return accel, vel, disp, a_t
+        return accel, vel, disp, a_t  # noqa: DOC201, RUF100
```
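For reference, `_newmark_beta` integrates the damped SDOF equation of motion under a ground-acceleration record. A compact single-oscillator sketch of the same family of schemes (constant-average-acceleration Newmark, β = 1/4, γ = 1/2; unit mass and all names here are illustrative, not the module's API):

```python
import numpy as np


def newmark_beta_sdof(ground_acc, dt, omega, zeta, beta=0.25, gamma=0.5):
    """Relative accel/vel/disp of a unit-mass damped SDOF oscillator."""
    c, k = 2.0 * zeta * omega, omega**2
    p = -np.asarray(ground_acc, dtype=float)  # effective force for m = 1
    n = p.size
    u, v, a = np.zeros(n), np.zeros(n), np.zeros(n)
    a[0] = p[0]  # at-rest initial conditions
    k_hat = k + gamma * c / (beta * dt) + 1.0 / (beta * dt**2)
    a1 = 1.0 / (beta * dt) + gamma * c / beta
    a2 = 1.0 / (2.0 * beta) + dt * c * (gamma / (2.0 * beta) - 1.0)
    for i in range(n - 1):
        dp_hat = p[i + 1] - p[i] + a1 * v[i] + a2 * a[i]
        du = dp_hat / k_hat
        dv = (gamma / (beta * dt)) * du - (gamma / beta) * v[i] + dt * (
            1.0 - gamma / (2.0 * beta)
        ) * a[i]
        da = du / (beta * dt**2) - v[i] / (beta * dt) - a[i] / (2.0 * beta)
        u[i + 1], v[i + 1], a[i + 1] = u[i] + du, v[i] + dv, a[i] + da
    return a, v, u, a + np.asarray(ground_acc)  # relative accel/vel/disp, total accel


# Example: 1 Hz oscillator, 5% damping, short synthetic record.
acc = np.sin(2 * np.pi * 2.0 * np.arange(0, 2, 0.01))
accel, vel, disp, a_t = newmark_beta_sdof(acc, 0.01, 2 * np.pi * 1.0, 0.05)
```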
36 changes: 18 additions & 18 deletions modules/Workflow/createGM4BIM.py
```diff
@@ -120,7 +120,7 @@ def get_scale_factors(input_units, output_units):  # noqa: C901
 
         scale_factors.update({input_name: f_scale})
 
-    return scale_factors
+    return scale_factors  # noqa: DOC201, RUF100
@@ -410,28 +410,28 @@ def createFilesForEventGrid(inputDir, outputDir, removeInputDir):  # noqa: C901, N802, N803, D103, PLR0915
                 m_pgd_y = 0.0
                 s_pgd_y = 0.0
             # add to dictionary
-            dict_im[('type', 'loc', 'dir', 'stat')].append(int(siteID))
+            dict_im[('type', 'loc', 'dir', 'stat')].append(int(siteID))  # noqa: RUF031, RUF100
             # pga
-            dict_im[('PGA', 0, 1, 'median')].append(m_pga_x)
-            dict_im[('PGA', 0, 1, 'beta')].append(s_pga_x)
-            dict_im[('PGA', 0, 2, 'median')].append(m_pga_y)
-            dict_im[('PGA', 0, 2, 'beta')].append(s_pga_y)
+            dict_im[('PGA', 0, 1, 'median')].append(m_pga_x)  # noqa: RUF031, RUF100
+            dict_im[('PGA', 0, 1, 'beta')].append(s_pga_x)  # noqa: RUF031, RUF100
+            dict_im[('PGA', 0, 2, 'median')].append(m_pga_y)  # noqa: RUF031, RUF100
+            dict_im[('PGA', 0, 2, 'beta')].append(s_pga_y)  # noqa: RUF031, RUF100
             # pgv
-            dict_im[('PGV', 0, 1, 'median')].append(m_pgv_x)
-            dict_im[('PGV', 0, 1, 'beta')].append(s_pgv_x)
-            dict_im[('PGV', 0, 2, 'median')].append(m_pgv_y)
-            dict_im[('PGV', 0, 2, 'beta')].append(s_pgv_y)
+            dict_im[('PGV', 0, 1, 'median')].append(m_pgv_x)  # noqa: RUF031, RUF100
+            dict_im[('PGV', 0, 1, 'beta')].append(s_pgv_x)  # noqa: RUF031, RUF100
+            dict_im[('PGV', 0, 2, 'median')].append(m_pgv_y)  # noqa: RUF031, RUF100
+            dict_im[('PGV', 0, 2, 'beta')].append(s_pgv_y)  # noqa: RUF031, RUF100
             # pgd
-            dict_im[('PGD', 0, 1, 'median')].append(m_pgd_x)
-            dict_im[('PGD', 0, 1, 'beta')].append(s_pgd_x)
-            dict_im[('PGD', 0, 2, 'median')].append(m_pgd_y)
-            dict_im[('PGD', 0, 2, 'beta')].append(s_pgd_y)
+            dict_im[('PGD', 0, 1, 'median')].append(m_pgd_x)  # noqa: RUF031, RUF100
+            dict_im[('PGD', 0, 1, 'beta')].append(s_pgd_x)  # noqa: RUF031, RUF100
+            dict_im[('PGD', 0, 2, 'median')].append(m_pgd_y)  # noqa: RUF031, RUF100
+            dict_im[('PGD', 0, 2, 'beta')].append(s_pgd_y)  # noqa: RUF031, RUF100
             for jj, Ti in enumerate(periods):  # noqa: N806
                 cur_sa = f'SA({Ti}s)'
-                dict_im[(cur_sa, 0, 1, 'median')].append(m_psa_x[jj])
-                dict_im[(cur_sa, 0, 1, 'beta')].append(s_psa_x[jj])
-                dict_im[(cur_sa, 0, 2, 'median')].append(m_psa_y[jj])
-                dict_im[(cur_sa, 0, 2, 'beta')].append(s_psa_y[jj])
+                dict_im[(cur_sa, 0, 1, 'median')].append(m_psa_x[jj])  # noqa: RUF031, RUF100
+                dict_im[(cur_sa, 0, 1, 'beta')].append(s_psa_x[jj])  # noqa: RUF031, RUF100
+                dict_im[(cur_sa, 0, 2, 'median')].append(m_psa_y[jj])  # noqa: RUF031, RUF100
+                dict_im[(cur_sa, 0, 2, 'beta')].append(s_psa_y[jj])  # noqa: RUF031, RUF100
 
     # aggregate
     for cur_key, cur_value in dict_im.items():
```
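The `RUF031` suppressions keep Ruff from rewriting subscripts like `dict_im[('PGA', 0, 1, 'median')]` to `dict_im['PGA', 0, 1, 'median']`; the parenthesized 4-tuple keys are kept explicit, a layout that maps naturally onto a pandas column MultiIndex when the dictionary is turned into a table (the downstream aggregation is not shown in this hunk). A toy sketch of the pattern (values are made up):

```python
from collections import defaultdict

import pandas as pd

# Keys mirror a 4-level column index: (type, loc, dir, stat).
dict_im = defaultdict(list)
dict_im[('type', 'loc', 'dir', 'stat')].append(101)  # site ID
dict_im[('PGA', 0, 1, 'median')].append(0.35)
dict_im[('PGA', 0, 1, 'beta')].append(0.12)

# pandas promotes tuple keys to hierarchical (MultiIndex) columns.
df_im = pd.DataFrame(dict_im)
print(df_im.columns.nlevels)  # -> 4
```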
89 changes: 89 additions & 0 deletions modules/Workflow/createResponseCSV.py
@@ -0,0 +1,89 @@ (new file)

```python
# noqa: D100, INP001

#
# Code to write response.csv file given input and dakotaTab.out files
#

# Written fmk, important code copied from whale/main.py
# date: 08/24

import argparse
import json
import os

import numpy as np
import pandas as pd


def main(input_file, dakota_tab_file):  # noqa: D103
    directory_inputs = os.path.dirname(input_file)  # noqa: PTH120
    os.chdir(directory_inputs)

    try:
        # Attempt to open the file
        with open(input_file) as file:  # noqa: PTH123
            data = json.load(file)

    except FileNotFoundError:
        # Handle the error if the file is not found
        print(f"Error createResponseCSV.py: The file '{input_file}' was not found.")  # noqa: T201
        return
    except OSError:
        # Handle other I/O errors
        print(f"Error createResponseCSV.py: Error reading the file '{input_file}'.")  # noqa: T201
        return

    app_data = data.get('Applications', None)
    if app_data is not None:
        dl_data = app_data.get('DL', None)

        if dl_data is not None:
            dl_app_data = dl_data.get('ApplicationData', None)

            if dl_app_data is not None:
                is_coupled = dl_app_data.get('coupled_EDP', None)

    try:
        # sy, abs - added try-statement because dakota-reliability does not write DakotaTab.out
        dakota_out = pd.read_csv(dakota_tab_file, sep=r'\s+', header=0, index_col=0)

        if is_coupled:
            if 'eventID' in dakota_out.columns:
                events = dakota_out['eventID'].values  # noqa: PD011
                events = [int(e.split('x')[-1]) for e in events]
                sorter = np.argsort(events)
                dakota_out = dakota_out.iloc[sorter, :]
                dakota_out.index = np.arange(dakota_out.shape[0])

        dakota_out.to_csv('response.csv')

    except FileNotFoundError:
        # Handle the error if the file is not found
        print(f"Error createResponseCSV.py: The file '{dakota_tab_file}' not found.")  # noqa: T201
        return

    except OSError:
        # Handle other I/O errors
        print(f"Error createResponseCSV.py: Error reading '{dakota_tab_file}'.")  # noqa: T201
        return


if __name__ == '__main__':
    parser = argparse.ArgumentParser()

    # Add arguments with default values
    parser.add_argument(
        '--inputFile', type=str, default='AIM.json', help='Path to the input file)'
    )
    parser.add_argument(
        '--dakotaTab',
        type=str,
        default='dakotaTab.out',
        help='Path to the dakotaTab file)',
    )

    # Parse the arguments
    args = parser.parse_args()

    # Use the arguments
    main(args.inputFile, args.dakotaTab)
```
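A usage sketch for the new helper (paths are hypothetical; the `--inputFile`/`--dakotaTab` flags and defaults are the ones defined above):

```python
# Programmatic use; equivalent to:
#   python createResponseCSV.py --inputFile /tmp/run0001/AIM.json --dakotaTab dakotaTab.out
from createResponseCSV import main

# The script chdirs into the input file's directory and writes response.csv there.
main('/tmp/run0001/AIM.json', 'dakotaTab.out')
```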
18 changes: 9 additions & 9 deletions modules/Workflow/whale/main.py
```diff
@@ -310,7 +310,7 @@ def create_command(command_list, enforced_python=None):
     for command_arg in command_list[1:]:
         command += f'"{command_arg}" '
 
-    return command
+    return command  # noqa: DOC201, RUF100
@@ -357,7 +357,7 @@ def run_command(command):
 
         py_script.main(arg_list)
 
-        return '', ''
+        return '', ''  # noqa: DOC201, RUF100
 
     else:  # noqa: RET505
         # fmk with Shell=True not working on older windows machines, new approach needed for quoted command .. turn into a list
@@ -668,7 +668,7 @@ def get_command_list(self, app_path, force_posix=False):  # noqa: FBT002, C901
 
        # pp.pprint(arg_list)
 
-        return arg_list
+        return arg_list  # noqa: DOC201, RUF100
@@ -1316,7 +1316,7 @@ def create_asset_files(self):
 
        log_div()
 
-        return assetFilesList
+        return assetFilesList  # noqa: DOC201, RUF100
 
    def augment_asset_files(self):  # noqa: C901
        """Short description
@@ -1504,7 +1504,7 @@ def augment_asset_files(self):  # noqa: C901
            )
        log_div()
 
-        return assetFilesList
+        return assetFilesList  # noqa: DOC201, RUF100
 
    def perform_system_performance_assessment(self, asset_type):
        """For an asset type run the system level performance assessment application
@@ -1525,7 +1525,7 @@ def perform_system_performance_assessment(self, asset_type):
                prepend_timestamp=False,
            )
            log_div()
-            return False
+            return False  # noqa: DOC201, RUF100
 
        if performance_app.rel_path == None:  # noqa: E711
            log_msg(
@@ -1909,7 +1909,7 @@ def init_simdir(self, asst_id=None, AIM_file_path='AIM.json'):  # noqa: C901, N8
            prepend_timestamp=False,
        )
        log_div()
-        return dst
+        return dst  # noqa: DOC201, RUF100
 
    def cleanup_simdir(self, asst_id):
        """Short description
@@ -2734,7 +2734,7 @@ def estimate_losses(  # noqa: C901
                ],
            )
            if ('PID', '0') in df_res.columns:
-                del df_res[('PID', '0')]
+                del df_res[('PID', '0')]  # noqa: RUF031, RUF100
 
            # store the EDP statistics in the output DF
            for col in np.transpose(col_info):
@@ -2874,7 +2874,7 @@ def aggregate_results(  # noqa: C901, PLR0912, PLR0915
            bldg_dir = Path(os.path.dirname(asst_data[a_i]['file'])).resolve()  # noqa: PTH120
            main_dir = bldg_dir
            assetTypeHierarchy = [bldg_dir.name]  # noqa: N806
-            while main_dir.parent.name != 'Results':
+            while main_dir.parent.name != self.run_dir.name:
                main_dir = bldg_dir.parent
                assetTypeHierarchy = [main_dir.name] + assetTypeHierarchy  # noqa: N806, RUF005
```
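The final hunk replaces the hard-coded `'Results'` folder name with the workflow's configured run directory, so aggregation also works when that directory is named differently. A minimal sketch of the walk-up logic (paths are hypothetical):

```python
from pathlib import Path

run_dir = Path('/scratch/run42')                   # hypothetical run directory
bldg_dir = Path('/scratch/run42/Buildings/1-100')  # one asset results directory

main_dir = bldg_dir
asset_type_hierarchy = [bldg_dir.name]
# Climb until the parent is the run directory, comparing names as the patched code does.
while main_dir.parent.name != run_dir.name:
    main_dir = main_dir.parent
    asset_type_hierarchy = [main_dir.name, *asset_type_hierarchy]

print(asset_type_hierarchy)  # ['Buildings', '1-100']
```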
4 changes: 2 additions & 2 deletions modules/common/simcenter_common.py
```diff
@@ -283,7 +283,7 @@ def get_scale_factors(input_units, output_units):  # noqa: C901
 
         scale_factors.update({input_name: f_scale})
 
-    return scale_factors
+    return scale_factors  # noqa: DOC201, RUF100
@@ -306,4 +306,4 @@ def get_unit_bases(input_units):
             input_unit_bases = cur_unit_bases
             break
 
-    return input_unit_bases
+    return input_unit_bases  # noqa: DOC201, RUF100
```
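Both this `get_scale_factors` and the identically named function in `createGM4BIM.py` return multiplicative factors for converting values between unit systems. A toy illustration of the idea (the conversion table is invented; the real modules support many more units and quantities):

```python
# Invented conversion table to one base unit per quantity.
TO_BASE = {'m': 1.0, 'cm': 0.01, 'in': 0.0254}


def scale_factor(from_unit, to_unit):
    """Factor f such that value_in_to_units = f * value_in_from_units."""
    return TO_BASE[from_unit] / TO_BASE[to_unit]


assert abs(scale_factor('in', 'cm') - 2.54) < 1e-12
```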